diff --git a/.github/_typos.toml b/.github/_typos.toml index 917745e1ae83..e506ebe3a0c4 100644 --- a/.github/_typos.toml +++ b/.github/_typos.toml @@ -16,6 +16,11 @@ extend-exclude = [ "test_code_tokenizer.py", "*response.json", "test_content.txt", + "serializedChatHistoryV1_15_1.json", + "MultipleFunctionsVsParameters.cs", + "PopulationByCountry.csv", + "PopulationByAdmin1.csv", + "WomensSuffrage.txt", ] [default.extend-words] diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml index 366934c73314..034c959e6dde 100644 --- a/.github/workflows/dotnet-build-and-test.yml +++ b/.github/workflows/dotnet-build-and-test.yml @@ -21,6 +21,7 @@ concurrency: permissions: contents: read + id-token: "write" jobs: paths-filter: @@ -57,11 +58,13 @@ jobs: os: "ubuntu-latest", configuration: Release, integration-tests: true, + environment: "integration", } - { dotnet: "8.0", os: "windows-latest", configuration: Debug } - { dotnet: "8.0", os: "windows-latest", configuration: Release } runs-on: ${{ matrix.os }} + environment: ${{ matrix.environment }} steps: - uses: actions/checkout@v4 - name: Setup dotnet ${{ matrix.dotnet }} @@ -84,6 +87,14 @@ jobs: dotnet test -c ${{ matrix.configuration }} $project --no-build -v Normal --logger trx --collect:"XPlat Code Coverage" --results-directory:"TestResults/Coverage/" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.ExcludeByAttribute=GeneratedCodeAttribute,CompilerGeneratedAttribute,ExcludeFromCodeCoverageAttribute done + - name: Azure CLI Login + if: github.event_name != 'pull_request' && matrix.integration-tests + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + - name: Run Integration Tests shell: bash if: github.event_name != 'pull_request' && matrix.integration-tests @@ -96,6 +107,7 @@ jobs: AzureOpenAI__Label: azure-text-davinci-003 
AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002 AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }} + AzureOpenAI__ChatDeploymentName: ${{ vars.AZUREOPENAI__CHATDEPLOYMENTNAME }} AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDING__DEPLOYMENTNAME }} AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }} AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }} @@ -110,23 +122,31 @@ jobs: OpenAITextToAudio__ModelId: ${{ vars.OPENAITEXTTOAUDIO__MODELID }} OpenAIAudioToText__ApiKey: ${{ secrets.OPENAIAUDIOTOTEXT__APIKEY }} OpenAIAudioToText__ModelId: ${{ vars.OPENAIAUDIOTOTEXT__MODELID }} + OpenAITextToImage__ApiKey: ${{ secrets.OPENAITEXTTOIMAGE__APIKEY }} + OpenAITextToImage__ModelId: ${{ vars.OPENAITEXTTOIMAGE__MODELID }} AzureOpenAITextToAudio__ApiKey: ${{ secrets.AZUREOPENAITEXTTOAUDIO__APIKEY }} AzureOpenAITextToAudio__Endpoint: ${{ secrets.AZUREOPENAITEXTTOAUDIO__ENDPOINT }} AzureOpenAITextToAudio__DeploymentName: ${{ vars.AZUREOPENAITEXTTOAUDIO__DEPLOYMENTNAME }} AzureOpenAIAudioToText__ApiKey: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__APIKEY }} AzureOpenAIAudioToText__Endpoint: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__ENDPOINT }} AzureOpenAIAudioToText__DeploymentName: ${{ vars.AZUREOPENAIAUDIOTOTEXT__DEPLOYMENTNAME }} + AzureOpenAITextToImage__ApiKey: ${{ secrets.AZUREOPENAITEXTTOIMAGE__APIKEY }} + AzureOpenAITextToImage__Endpoint: ${{ secrets.AZUREOPENAITEXTTOIMAGE__ENDPOINT }} + AzureOpenAITextToImage__DeploymentName: ${{ vars.AZUREOPENAITEXTTOIMAGE__DEPLOYMENTNAME }} Bing__ApiKey: ${{ secrets.BING__APIKEY }} OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }} + OpenAI__ChatModelId: ${{ vars.OPENAI__CHATMODELID }} + AzureAIInference__ApiKey: ${{ secrets.AZUREAIINFERENCE__APIKEY }} + AzureAIInference__Endpoint: ${{ secrets.AZUREAIINFERENCE__ENDPOINT }} # Generate test reports and check coverage - name: Generate test reports - uses: danielpalme/ReportGenerator-GitHub-Action@5.3.8 + uses: 
danielpalme/ReportGenerator-GitHub-Action@5.3.9 with: reports: "./TestResults/Coverage/**/coverage.cobertura.xml" targetdir: "./TestResults/Reports" reporttypes: "JsonSummary" - assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI" + assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Connectors.AzureOpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI" - name: Check coverage shell: pwsh diff --git a/.github/workflows/python-build-wheel.yml b/.github/workflows/python-build-wheel.yml deleted file mode 100644 index 5752dee8ace9..000000000000 --- a/.github/workflows/python-build-wheel.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: python-build-wheel - -on: - workflow_dispatch: - push: - branches: [ "python_preview" ] - -permissions: - contents: read - -jobs: - build-wheel: - - runs-on: ubuntu-latest - - defaults: - run: - working-directory: python - - steps: - - uses: actions/checkout@v4 - - - run: echo "/root/.local/bin" >> $GITHUB_PATH - - - name: Install poetry - run: pipx install poetry - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.10' - cache: "poetry" - cache-dependency-path: "python/pyproject.toml" - - - name: Install Semantic Kernel - run: poetry install --no-ansi - - - name: Build wheel - run: poetry build - - - name: Upload wheel file to artifacts - uses: actions/upload-artifact@v4 - with: - name: dist - path: python/dist/* diff --git a/.github/workflows/python-build.yml 
b/.github/workflows/python-build.yml new file mode 100644 index 000000000000..6fbb810bae2f --- /dev/null +++ b/.github/workflows/python-build.yml @@ -0,0 +1,30 @@ +name: Python Build Assets + +on: + release: + types: [published] + +jobs: + python-build-assets: + if: github.event_name == 'release' && startsWith(github.event.release.tag_name, 'python-') + name: Python Build Assets and add to Release + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Set up uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Check version + run: | + echo "Building and uploading Python package version: ${{ github.event.release.tag_name }}" + - name: Build the package + run: cd python && make build + - name: Release + uses: softprops/action-gh-release@v2 + with: + files: | + python/dist/* diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml index 1b1a44fa1bb6..0655d40a03cd 100644 --- a/.github/workflows/python-integration-tests.yml +++ b/.github/workflows/python-integration-tests.yml @@ -15,6 +15,11 @@ on: permissions: contents: read + id-token: "write" + +env: + # Configure a constant location for the uv cache + UV_CACHE_DIR: /tmp/.uv-cache jobs: paths-filter: @@ -42,34 +47,45 @@ jobs: name: Python Pre-Merge Integration Tests needs: paths-filter if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true' - runs-on: ${{ matrix.os }} strategy: max-parallel: 1 fail-fast: false matrix: python-version: ["3.11"] os: [ubuntu-latest] + defaults: + run: + working-directory: python + runs-on: ${{ matrix.os }} + environment: "integration" steps: - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry + - name: Set up uv + if: ${{ matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest' }} + run: curl -LsSf 
https://astral.sh/uv/install.sh | sh + - name: Set up uv + if: ${{ matrix.os == 'windows-latest' }} + run: irm https://astral.sh/uv/install.ps1 | iex + shell: powershell - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "poetry" + - name: Restore uv cache + id: cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/uv.lock') }} - name: Install dependencies with hnswlib native disabled if: matrix.os == 'macos-latest' && matrix.python-version == '3.11' run: | export HNSWLIB_NO_NATIVE=1 - python -m pip install --upgrade pip setuptools wheel - cd python && poetry install --with tests + uv sync --all-extras --dev - name: Install dependencies with hnswlib native enabled if: matrix.os != 'macos-latest' || matrix.python-version != '3.11' run: | - python -m pip install --upgrade pip setuptools wheel - cd python - poetry install --with tests + uv sync --all-extras --dev - name: Install Ollama if: matrix.os == 'ubuntu-latest' run: | @@ -95,10 +111,17 @@ jobs: - name: Setup Redis Stack Server if: matrix.os == 'ubuntu-latest' run: docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest + - name: Azure CLI Login + if: github.event_name != 'pull_request' + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - name: Run Integration Tests id: run_tests shell: bash - env: # Set Azure credentials secret as an input + env: HNSWLIB_NO_NATIVE: 1 Python_Integration_Tests: Python_Integration_Tests AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME }} # azure-text-embedding-ada-002 @@ -106,7 +129,6 @@ jobs: AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }} AZURE_OPENAI_API_VERSION: ${{ 
vars.AZURE_OPENAI_API_VERSION }} AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} - AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} BING_API_KEY: ${{ secrets.BING_API_KEY }} OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }} OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }} @@ -115,6 +137,7 @@ jobs: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} PINECONE_API_KEY: ${{ secrets.PINECONE__APIKEY }} POSTGRES_CONNECTION_STRING: ${{secrets.POSTGRES__CONNECTIONSTR}} + POSTGRES_MAX_POOL: ${{ vars.POSTGRES_MAX_POOL }} AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}} AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}} MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}} @@ -136,84 +159,97 @@ jobs: VERTEX_AI_EMBEDDING_MODEL_ID: ${{ vars.VERTEX_AI_EMBEDDING_MODEL_ID }} REDIS_CONNECTION_STRING: ${{ vars.REDIS_CONNECTION_STRING }} run: | - cd python - poetry run pytest -n logical --dist loadfile --dist worksteal ./tests/integration ./tests/samples -v --junitxml=pytest.xml + uv run pytest -n logical --dist loadfile --dist worksteal ./tests/integration ./tests/samples -v --junitxml=pytest.xml - name: Surface failing tests if: always() uses: pmeier/pytest-results-action@main with: - # A list of JUnit XML files, directories containing the former, and wildcard - # patterns to process. - # See @actions/glob for supported patterns. path: python/pytest.xml - # (Optional) Add a summary of the results at the top of the report summary: true - # (Optional) Select which results should be included in the report. - # Follows the same syntax as `pytest -r` display-options: fEX - # (Optional) Fail the workflow if no JUnit XML was found. 
fail-on-empty: true - # (Optional) Title of the test results section in the workflow summary title: Test results + - name: Minimize uv cache + run: uv cache prune --ci python-integration-tests: needs: paths-filter if: (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && needs.paths-filter.outputs.pythonChanges == 'true' - runs-on: ${{ matrix.os }} strategy: max-parallel: 1 fail-fast: false matrix: python-version: ["3.10", "3.11", "3.12"] os: [ubuntu-latest, windows-latest, macos-latest] + defaults: + run: + working-directory: python + runs-on: ${{ matrix.os }} + environment: "integration" steps: - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry + - name: Set up uv + if: ${{ matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest' }} + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Set up uv + if: ${{ matrix.os == 'windows-latest' }} + run: irm https://astral.sh/uv/install.ps1 | iex + shell: powershell - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "poetry" + - name: Restore uv cache + id: cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/uv.lock') }} - name: Install dependencies with hnswlib native disabled if: matrix.os == 'macos-latest' && matrix.python-version == '3.11' run: | export HNSWLIB_NO_NATIVE=1 - python -m pip install --upgrade pip setuptools wheel - cd python && poetry install --with tests - + uv sync --all-extras --dev - name: Install dependencies with hnswlib native enabled if: matrix.os != 'macos-latest' || matrix.python-version != '3.11' run: | - python -m pip install --upgrade pip setuptools wheel - cd python && poetry install --with tests - + uv sync --all-extras --dev - name: Install Ollama if: matrix.os == 'ubuntu-latest' run: | - curl -fsSL https://ollama.com/install.sh | sh - 
ollama serve & - sleep 5 - + if ${{ vars.OLLAMA_MODEL != '' }}; then + curl -fsSL https://ollama.com/install.sh | sh + ollama serve & + sleep 5 + fi - name: Pull model in Ollama if: matrix.os == 'ubuntu-latest' run: | - ollama pull ${{ vars.OLLAMA_MODEL }} - ollama list - + if ${{ vars.OLLAMA_MODEL != '' }}; then + ollama pull ${{ vars.OLLAMA_MODEL }} + ollama list + fi - name: Google auth uses: google-github-actions/auth@v2 with: project_id: ${{ vars.VERTEX_AI_PROJECT_ID }} credentials_json: ${{ secrets.VERTEX_AI_SERVICE_ACCOUNT_KEY }} - - name: Set up gcloud uses: google-github-actions/setup-gcloud@v2 - + - name: Setup Redis Stack Server + if: matrix.os == 'ubuntu-latest' + run: docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest + - name: Azure CLI Login + if: github.event_name != 'pull_request' + uses: azure/login@v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} - name: Run Integration Tests id: run_tests shell: bash - env: # Set Azure credentials secret as an input + env: HNSWLIB_NO_NATIVE: 1 Python_Integration_Tests: Python_Integration_Tests AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME }} # azure-text-embedding-ada-002 @@ -221,7 +257,6 @@ jobs: AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }} AZURE_OPENAI_API_VERSION: ${{ vars.AZURE_OPENAI_API_VERSION }} AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} - AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} BING_API_KEY: ${{ secrets.BING_API_KEY }} OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI_CHAT_MODEL_ID }} OPENAI_TEXT_MODEL_ID: ${{ vars.OPENAI_TEXT_MODEL_ID }} @@ -230,6 +265,7 @@ jobs: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} PINECONE_API_KEY: ${{ secrets.PINECONE__APIKEY }} POSTGRES_CONNECTION_STRING: ${{secrets.POSTGRES__CONNECTIONSTR}} + POSTGRES_MAX_POOL: ${{ 
vars.POSTGRES_MAX_POOL }} AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}} AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}} MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}} @@ -240,6 +276,8 @@ jobs: MISTRALAI_API_KEY: ${{secrets.MISTRALAI_API_KEY}} MISTRALAI_CHAT_MODEL_ID: ${{ vars.MISTRALAI_CHAT_MODEL_ID }} MISTRALAI_EMBEDDING_MODEL_ID: ${{ vars.MISTRALAI_EMBEDDING_MODEL_ID }} + ANTHROPIC_API_KEY: ${{secrets.ANTHROPIC_API_KEY}} + ANTHROPIC_CHAT_MODEL_ID: ${{ vars.ANTHROPIC_CHAT_MODEL_ID }} OLLAMA_MODEL: "${{ matrix.os == 'ubuntu-latest' && vars.OLLAMA_MODEL || '' }}" # phi3 GOOGLE_AI_GEMINI_MODEL_ID: ${{ vars.GOOGLE_AI_GEMINI_MODEL_ID }} GOOGLE_AI_EMBEDDING_MODEL_ID: ${{ vars.GOOGLE_AI_EMBEDDING_MODEL_ID }} @@ -248,16 +286,19 @@ jobs: VERTEX_AI_GEMINI_MODEL_ID: ${{ vars.VERTEX_AI_GEMINI_MODEL_ID }} VERTEX_AI_EMBEDDING_MODEL_ID: ${{ vars.VERTEX_AI_EMBEDDING_MODEL_ID }} REDIS_CONNECTION_STRING: ${{ vars.REDIS_CONNECTION_STRING }} - ANTHROPIC_API_KEY: ${{secrets.ANTHROPIC_API_KEY}} - ANTHROPIC_CHAT_MODEL_ID: ${{ vars.ANTHROPIC_CHAT_MODEL_ID }} run: | - if ${{ matrix.os == 'ubuntu-latest' }}; then - docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest - fi - - cd python - poetry run pytest -n logical --dist loadfile --dist worksteal ./tests/integration -v - poetry run pytest -n logical --dist loadfile --dist worksteal ./tests/samples -v + uv run pytest -n logical --dist loadfile --dist worksteal ./tests/integration ./tests/samples -v --junitxml=pytest.xml + - name: Surface failing tests + if: always() + uses: pmeier/pytest-results-action@main + with: + path: python/pytest.xml + summary: true + display-options: fEX + fail-on-empty: true + title: Test results + - name: Minimize uv cache + run: uv cache prune --ci # This final job is required to satisfy the merge queue. 
It must only run (or succeed) if no tests failed python-integration-tests-check: diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index 3f20ae2f0d02..39549589e69f 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -15,14 +15,29 @@ jobs: python-version: ["3.10"] runs-on: ubuntu-latest continue-on-error: true + defaults: + run: + working-directory: python + env: + # Configure a constant location for the uv cache + UV_CACHE_DIR: /tmp/.uv-cache steps: - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry - - uses: actions/setup-python@v5 + - name: Set up uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "poetry" - - name: Install dependencies - run: cd python && poetry install + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/uv.lock') }} + - name: Install the project + run: uv sync --all-extras --dev - uses: pre-commit/action@v3.0.1 + with: + extra_args: --config python/.pre-commit-config.yaml + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.github/workflows/python-unit-tests.yml b/.github/workflows/python-unit-tests.yml index 4137270c3796..85e7f8090e3e 100644 --- a/.github/workflows/python-unit-tests.yml +++ b/.github/workflows/python-unit-tests.yml @@ -5,6 +5,9 @@ on: branches: ["main", "feature*"] paths: - "python/**" +env: + # Configure a constant location for the uv cache + UV_CACHE_DIR: /tmp/.uv-cache jobs: python-unit-tests: @@ -28,34 +31,38 @@ jobs: working-directory: python steps: - uses: actions/checkout@v4 - - name: Install poetry - run: pipx install poetry + - name: Set up uv + if: ${{ matrix.os == 'ubuntu-latest' || matrix.os == 'macos-latest' }} + run: curl -LsSf 
https://astral.sh/uv/install.sh | sh + - name: Set up uv + if: ${{ matrix.os == 'windows-latest' }} + run: irm https://astral.sh/uv/install.ps1 | iex + shell: powershell - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - cache: "poetry" - - name: Install dependencies - run: poetry install --with unit-tests + - name: Restore uv cache + id: cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/uv.lock') }} + - name: Install the project + run: uv sync --all-extras --dev - name: Test with pytest - run: poetry run pytest --junitxml=pytest.xml ./tests/unit + run: uv run pytest --junitxml=pytest.xml ./tests/unit - name: Surface failing tests if: always() uses: pmeier/pytest-results-action@main with: - # A list of JUnit XML files, directories containing the former, and wildcard - # patterns to process. - # See @actions/glob for supported patterns. path: python/pytest.xml - # (Optional) Add a summary of the results at the top of the report summary: true - # (Optional) Select which results should be included in the report. - # Follows the same syntax as `pytest -r` display-options: fEX - # (Optional) Fail the workflow if no JUnit XML was found. 
fail-on-empty: true - # (Optional) Title of the test results section in the workflow summary title: Test results + - name: Minimize uv cache + run: uv cache prune --ci python-test-coverage: name: Python Test Coverage runs-on: [ubuntu-latest] @@ -65,21 +72,27 @@ jobs: defaults: run: working-directory: python + env: + PYTHON_VERSION: "3.10" steps: - uses: actions/checkout@v4 - name: Setup filename variables run: echo "FILE_ID=${{ github.event.number }}" >> $GITHUB_ENV - - name: Install poetry - run: pipx install poetry - - name: Set up Python 3.10 + - name: Set up uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@v5 with: - python-version: "3.10" - cache: "poetry" - - name: Install dependencies - run: poetry install --with unit-tests + python-version: ${{ env.PYTHON_VERSION }} + - name: Restore uv cache + uses: actions/cache@v4 + with: + path: ${{ env.UV_CACHE_DIR }} + key: uv-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ hashFiles('**/uv.lock') }} + - name: Install the project + run: uv sync --all-extras --dev - name: Test with pytest - run: poetry run pytest -q --junitxml=pytest.xml --cov=semantic_kernel --cov-report=term-missing:skip-covered ./tests/unit | tee python-coverage.txt + run: uv run pytest -q --junitxml=pytest.xml --cov=semantic_kernel --cov-report=term-missing:skip-covered ./tests/unit | tee python-coverage.txt - name: Upload coverage if: always() uses: actions/upload-artifact@v4 @@ -96,3 +109,5 @@ jobs: path: python/pytest.xml overwrite: true retention-days: 1 + - name: Minimize uv cache + run: uv cache prune --ci diff --git a/.gitignore b/.gitignore index d37a856dbc26..3912623b85f8 100644 --- a/.gitignore +++ b/.gitignore @@ -461,6 +461,7 @@ env/ venv/ myvenv/ ENV/ +.venv*/ # Python dist dist/ diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 91ff88105299..28d543bbc52b 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -8,7 +8,10 @@ "label": "setup 
(contributing-R#)", "detail": "", "group": "build", - "dependsOn": ["new tool-manifest", "# Setup"], + "dependsOn": [ + "new tool-manifest", + "# Setup" + ], "dependsOrder": "sequence" }, { @@ -16,7 +19,10 @@ "detail": "Install ReSharper Global Tools", "command": "dotnet", "type": "process", - "args": ["new", "tool-manifest"], + "args": [ + "new", + "tool-manifest" + ], "options": { "cwd": "${workspaceFolder}/dotnet" } @@ -88,54 +94,6 @@ ], "dependsOrder": "sequence" }, - // ***************************** - // Contributing (python) - Setup - // ***************************** - { - "label": "setup (contributing-python)", - "detail": "", - "group": "build", - "dependsOn": ["install poetry", "install python packages"], - "dependsOrder": "sequence" - }, - { - "label": "install poetry", - "detail": "Install poetry", - "command": "pip3", - "type": "shell", - "args": ["install", "poetry"], - "options": { - "cwd": "${workspaceFolder}/python" - } - }, - { - "label": "install python packages", - "detail": "Install python packages", - "command": "poetry", - "type": "shell", - "args": ["install"], - "options": { - "cwd": "${workspaceFolder}/python" - } - }, - // Formatting - { - "label": "validate (contributing-python)", - "command": "poetry", - "type": "shell", - "group": "build", - "args": [ - "run", - "pre-commit", - "run", - "-c", - ".conf/.pre-commit-config.yaml", - "-a" - ], - "options": { - "cwd": "${workspaceFolder}/python" - } - }, // *************** // Kernel (dotnet) // *************** @@ -163,7 +121,10 @@ "label": "test (Semantic-Kernel)", "command": "dotnet", "type": "process", - "args": ["test", "SemanticKernel.UnitTests.csproj"], + "args": [ + "test", + "SemanticKernel.UnitTests.csproj" + ], "problemMatcher": "$msCompile", "group": "test", "presentation": { @@ -271,58 +232,6 @@ } }, // **************** - // Kernel (python) - // **************** - // Test - { - "label": "test (Semantic-Kernel-Python)", - "command": "poetry", - "type": "shell", - "args": ["run", 
"pytest", "tests/unit"], - "problemMatcher": "$msCompile", - "group": "test", - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "PR-Validate" - }, - "options": { - "cwd": "${workspaceFolder}/python" - } - }, - { - "label": "test (Semantic-Kernel-Python Integration)", - "command": "poetry", - "type": "shell", - "args": ["run", "pytest", "tests/integration", "-k", "${input:filter}"], - "problemMatcher": "$msCompile", - "group": "test", - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "PR-Validate" - }, - "options": { - "cwd": "${workspaceFolder}/python" - } - }, - { - "label": "test (Semantic-Kernel-Python ALL)", - "command": "poetry", - "type": "shell", - "args": ["run", "pytest", "tests", "-k", "${input:filter}"], - "problemMatcher": "$msCompile", - "group": "test", - "presentation": { - "reveal": "always", - "panel": "shared", - "group": "PR-Validate" - }, - "options": { - "cwd": "${workspaceFolder}/python" - } - }, - // **************** // Samples (dotnet) // **************** // Kernel Syntax Examples @@ -380,4 +289,4 @@ "description": "Enter a filter to pass as argument or filter" } ] -} +} \ No newline at end of file diff --git a/README.md b/README.md index 2cc88b643a4a..8e7f321739cc 100644 --- a/README.md +++ b/README.md @@ -111,7 +111,7 @@ on our Learn site. Each sample comes with a completed C# and Python project that 1. ๐Ÿ“– [Getting Started](https://learn.microsoft.com/en-us/semantic-kernel/get-started/quick-start-guide) 1. ๐Ÿ”Œ [Detailed Samples](https://learn.microsoft.com/en-us/semantic-kernel/get-started/detailed-samples) -1. ๐Ÿ’ก [Concepts](https://learn.microsoft.com/en-us/semantic-kernel/concepts/agents) +1. 
๐Ÿ’ก [Concepts](https://learn.microsoft.com/en-us/semantic-kernel/concepts/kernel) Finally, refer to our API references for more details on the C# and Python APIs: @@ -136,7 +136,7 @@ in a different direction, but also to consider the impact on the larger ecosyste To learn more and get started: - Read the [documentation](https://aka.ms/sk/learn) -- Learn how to [contribute](https://learn.microsoft.com/en-us/semantic-kernel/get-started/contributing) to the project +- Learn how to [contribute](https://learn.microsoft.com/en-us/semantic-kernel/support/contributing) to the project - Ask questions in the [GitHub discussions](https://github.com/microsoft/semantic-kernel/discussions) - Ask questions in the [Discord community](https://aka.ms/SKDiscord) diff --git a/TRANSPARENCY_FAQS.md b/TRANSPARENCY_FAQS.md index a891ec68ec28..e390229442a2 100644 --- a/TRANSPARENCY_FAQS.md +++ b/TRANSPARENCY_FAQS.md @@ -5,7 +5,6 @@ Microsoft Semantic Kernel is a lightweight, open-source development kit designed It serves as efficient middleware that supports developers in building AI agents, automating business processes, and connecting their code with the latest AI technologies. Input to this system can range from text data to structured commands, and it produces various outputs, including natural language responses, function calls, and other actionable data. - ## What can Microsoft Semantic Kernel do? Building upon its foundational capabilities, Microsoft Semantic Kernel facilitates several functionalities: - AI Agent Development: Users can create agents capable of performing specific tasks or interactions based on user input. @@ -15,7 +14,6 @@ Building upon its foundational capabilities, Microsoft Semantic Kernel facilitat - Filtering: Developers can use filters to monitor the application, control function invocation or implement Responsible AI. 
- Prompt Templates: Developer can define their prompts using various template languages including Handlebars and Liquid or the built-in Semantic Kernel format. - ## What is/are Microsoft Semantic Kernelโ€™s intended use(s)? The intended uses of Microsoft Semantic Kernel include: - Production Ready Applications: Building small to large enterprise scale solutions that can leverage advanced AI models capabilities. @@ -51,7 +49,6 @@ Operational factors and settings for optimal use include: - Real-Time Monitoring: System behavior should be regularly monitored to detect unexpected patterns or malfunctions promptly. - Incorporate RAI and safety tools like Prompt Shield with filters to ensure responsible use. - ### Plugins and Extensibility #### What are plugins and how does Microsoft Semantic Kernel use them? @@ -68,3 +65,14 @@ Potential issues that may arise include: - Invocation Failures: Incorrectly triggered plugins can result in unexpected outputs. - Output Misinformation: Errors in plugin handling can lead to generation of inaccurate or misleading results. - Dependency Compatibility: Changes in external dependencies may affect plugin functionality. To prevent these issues, users are advised to keep plugins updated and to rigorously test their implementations for stability and accuracy + +#### When working with AI, the developer can enable content moderation in the AI platforms used, and has complete control on the prompts being used, including the ability to define responsible boundaries and guidelines. For instance: +- When using Azure OpenAI, by default the service includes a content filtering system that works alongside core models. This system works by running both the prompt and completion through an ensemble of classification models aimed at detecting and preventing the output of harmful content. 
In addition to the content filtering system, the Azure OpenAI Service performs monitoring to detect content and/or behaviors that suggest use of the service in a manner that might violate applicable product terms. The filter configuration can be adjusted, for example to block also "low severity level" content. See here for more information. +- The developer can integrate Azure AI Content Safety to detect harmful user-generated and AI-generated content, including text and images. The service includes an interactive Studio online tool with templates and customized workflows. See here for more information. +- When using OpenAI the developer can integrate OpenAI Moderation to identify problematic content and take action, for instance by filtering it. See here for more information. +- Other AI providers provide content moderation and moderation APIs, which developers can integrate with Node Engine. + +#### If a sequence of components are run, additional risks/failures may arise when using non-deterministic behavior. To mitigate this, developers can: +Implement safety measures and bounds on each component to prevent undesired outcomes. +Add output to the user to maintain control and awareness of the system's state. +In multi-agent scenarios, build in places that prompt the user for a response, ensuring user involvement and reducing the likelihood of undesired results due to multi-agent looping. diff --git a/docs/PLANNERS.md b/docs/PLANNERS.md index 9ff63664adca..538e21569d64 100644 --- a/docs/PLANNERS.md +++ b/docs/PLANNERS.md @@ -2,4 +2,4 @@ This document has been moved to the Semantic Kernel Documentation site. You can find it by navigating to the [Automatically orchestrate AI with planner](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/planner) page. 
-To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/ai-orchestration/planner.md) +To make an update on the page, file a PR on the [docs repo.](https://github.com/MicrosoftDocs/semantic-kernel-docs/blob/main/semantic-kernel/concepts/planning.md) \ No newline at end of file diff --git a/docs/decisions/0034-rag-in-sk.md b/docs/decisions/0034-rag-in-sk.md index f94757179176..2d010e0a8503 100644 --- a/docs/decisions/0034-rag-in-sk.md +++ b/docs/decisions/0034-rag-in-sk.md @@ -143,7 +143,7 @@ var result = await kernel.InvokePromptAsync("{{budgetByYear}} What is my budget This approach is similar to Option 1, but data search step is part of prompt rendering process. Following list contains possible plugins to use for data search: - [ChatGPT Retrieval Plugin](https://github.com/openai/chatgpt-retrieval-plugin) - this plugin should be hosted as a separate service. It has integration with various [vector databases](https://github.com/openai/chatgpt-retrieval-plugin?tab=readme-ov-file#choosing-a-vector-database). -- [SemanticKernel.Plugins.Memory.TextMemoryPlugin](https://www.nuget.org/packages/Microsoft.SemanticKernel.Plugins.Memory) - Semantic Kernel solution, which supports various [vector databases](https://learn.microsoft.com/en-us/semantic-kernel/memories/vector-db#available-connectors-to-vector-databases). +- [SemanticKernel.Plugins.Memory.TextMemoryPlugin](https://www.nuget.org/packages/Microsoft.SemanticKernel.Plugins.Memory) - Semantic Kernel solution, which supports various vector databases. - Custom user plugin. ChatGPT Retrieval Plugin: diff --git a/docs/decisions/0051-dotnet-azure-model-as-a-service.md b/docs/decisions/0051-dotnet-azure-model-as-a-service.md new file mode 100644 index 000000000000..b023838d5128 --- /dev/null +++ b/docs/decisions/0051-dotnet-azure-model-as-a-service.md @@ -0,0 +1,46 @@ +--- +# These are optional elements. Feel free to remove any of them. 
+status: proposed
+contact: rogerbarreto
+date: 2024-08-07
+deciders: rogerbarreto, markwallace-microsoft
+consulted: taochen
+---
+
+# Support Connector for .Net Azure Model-as-a-Service (Azure AI Studio)
+
+## Context and Problem Statement
+
+There has been a demand from customers to use and support natively models deployed in [Azure AI Studio - Serverless APIs](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/model-catalog-overview#model-deployment-managed-compute-and-serverless-api-pay-as-you-go). This mode of consumption operates on a pay-as-you-go basis, typically using tokens for billing purposes. Clients can access the service via the [Azure AI Model Inference API](https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-api?tabs=azure-studio) or client SDKs.
+
+At present, there is no official support for [Azure AI Studio](https://learn.microsoft.com/en-us/azure/ai-studio/what-is-ai-studio). The purpose of this ADR is to examine the constraints of the service and explore potential solutions to enable support for the service via the development of a new AI connector.
+
+## Azure Inference Client library for .NET
+
+The Azure team has a new client library, namely [Azure.AI.Inference](https://github.com/Azure/azure-sdk-for-net/blob/Azure.AI.Inference_1.0.0-beta.1/sdk/ai/Azure.AI.Inference/README.md) in .Net, for effectively interacting with the service. While the service API is OpenAI-compatible, it is not permissible to use the OpenAI and the Azure OpenAI client libraries for interacting with the service as they are not independent with respect to both the models and their providers. This is because Azure AI Studio features a diverse range of open-source models, other than OpenAI models.
+
+### Limitations
+
+Currently it is known that the first version of the client SDK will only support: `Chat Completion`, `Text Embedding Generation`, and `Image Embedding Generation`, with `TextToImage Generation` planned. 
+
+There are no current plans to support `Text Generation` modality.
+
+## AI Connector
+
+### Namespace options
+
+- `Microsoft.SemanticKernel.Connectors.AzureAI`
+- `Microsoft.SemanticKernel.Connectors.AzureAIInference`
+- `Microsoft.SemanticKernel.Connectors.AzureAIModelInference`
+
+Decision: `Microsoft.SemanticKernel.Connectors.AzureAIInference`
+
+### Support for model-specific parameters
+
+Models can possess supplementary parameters that are not part of the default API. The service API and the client SDK enable the provision of model-specific parameters. Users can provide model-specific settings via a dedicated argument along with other settings, such as `temperature` and `top_p`, among others.
+
+Azure AI Inference specialized `PromptExecutionSettings` will support those customizable parameters.
+
+### Feature Branch
+
+The development of the Azure AI Inference connector will be done in a feature branch named `feature-connectors-azureaiinference`.
diff --git a/docs/decisions/0051-entity-framework-as-connector.md b/docs/decisions/0051-entity-framework-as-connector.md
new file mode 100644
index 000000000000..f4a5b8f5d9b7
--- /dev/null
+++ b/docs/decisions/0051-entity-framework-as-connector.md
@@ -0,0 +1,119 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: proposed
+contact: dmytrostruk
+date: 2024-08-20
+deciders: sergeymenshykh, markwallace, rbarreto, westey-m
+---
+
+# Entity Framework as Vector Store Connector
+
+## Context and Problem Statement
+
+This ADR contains investigation results about adding Entity Framework as Vector Store connector to the Semantic Kernel codebase.
+
+Entity Framework is a modern object-relational mapper that allows developers to build a clean, portable, and high-level data access layer with .NET (C#) across a variety of databases, including SQL Database (on-premises and Azure), SQLite, MySQL, PostgreSQL, Azure Cosmos DB and more. It supports LINQ queries, change tracking, updates and schema migrations. 
+ +One of the huge benefits of Entity Framework for Semantic Kernel is the support of multiple databases. In theory, one Entity Framework connector can work as a hub to multiple databases at the same time, which should simplify the development and maintenance of integration with these databases. + +However, there are some limitations, which won't allow Entity Framework to fit in updated Vector Store design. + +### Collection Creation + +In new Vector Store design, interface `IVectorStoreRecordCollection` contains methods to manipulate with database collections: +- `CollectionExistsAsync` +- `CreateCollectionAsync` +- `CreateCollectionIfNotExistsAsync` +- `DeleteCollectionAsync` + +In Entity Framework, collection (also known as schema/table) creation using programmatic approach is not recommended in production scenarios. The recommended approach is to use Migrations (in case of code-first approach), or to use Reverse Engineering (also known as scaffolding/database-first approach). Programmatic schema creation is recommended only for testing/local scenarios. Also, collection creation process differs for different databases. For example, MongoDB EF Core provider doesn't support schema migrations or database-first/model-first approaches. Instead, the collection is created automatically when a document is inserted for the first time, if collection doesn't already exist. This brings the complexity around methods such as `CreateCollectionAsync` from `IVectorStoreRecordCollection` interface, since there is no abstraction around collection management in EF that will work for most databases. For such cases, the recommended approach is to rely on automatic creation or handle collection creation individually for each database. As an example, in MongoDB it's recommended to use MongoDB C# Driver directly. 
+ +Sources: +- https://learn.microsoft.com/en-us/ef/core/managing-schemas/ +- https://learn.microsoft.com/en-us/ef/core/managing-schemas/ensure-created +- https://learn.microsoft.com/en-us/ef/core/managing-schemas/migrations/applying?tabs=dotnet-core-cli#apply-migrations-at-runtime +- https://github.com/mongodb/mongo-efcore-provider?tab=readme-ov-file#not-supported--out-of-scope-features + +### Key Management + +It won't be possible to define one set of valid key types, since not all databases support all types as keys. In such case, it will be possible to support only standard type for keys such as `string`, and then the conversion should be performed to satisfy key restrictions for specific database. This removes the advantage of unified connector implementation, since key management should be handled for each database individually. + +Sources: +- https://learn.microsoft.com/en-us/ef/core/modeling/keys?tabs=data-annotations + +### Vector Management + +`ReadOnlyMemory` type, which is used in most SK connectors today to hold embeddings is not supported in Entity Framework out-of-the-box. When trying to use this type, the following error occurs: + +``` +The property '{Property Name}' could not be mapped because it is of type 'ReadOnlyMemory?', which is not a supported primitive type or a valid entity type. Either explicitly map this property, or ignore it using the '[NotMapped]' attribute or by using 'EntityTypeBuilder.Ignore' in 'OnModelCreating'. +``` + +However, it's possible to use `byte[]` type or create explicit mapping to support `ReadOnlyMemory`. It's already implemented in `pgvector` package, but it's not clear whether it will work with different databases. 
+ +Sources: +- https://github.com/pgvector/pgvector-dotnet/blob/master/README.md#entity-framework-core +- https://github.com/pgvector/pgvector-dotnet/blob/master/src/Pgvector/Vector.cs +- https://github.com/pgvector/pgvector-dotnet/blob/master/src/Pgvector.EntityFrameworkCore/VectorTypeMapping.cs + +### Testing + +Create Entity Framework connector and write the tests using SQLite database doesn't mean that this integration will work for other EF-supported databases. Each database implements its own set of Entity Framework features, so in order to ensure that Entity Framework connector covers main use-cases with specific database, unit/integration tests should be added using each database separately. + +Sources: +- https://github.com/mongodb/mongo-efcore-provider?tab=readme-ov-file#supported-features + +### Compatibility + +It's not possible to use latest Entity Framework Core package and develop it for .NET Standard. Last version of EF Core which supports .NET Standard was version 5.0 (latest EF Core version is 8.0). Which means that Entity Framework connector can target .NET 8.0 only (which is different from other available SK connectors today, which target both net8.0 and netstandard2.0). + +Another way would be to use Entity Framework 6, which can target both net8.0 and netstandard2.0, but this version of Entity Framework is no longer being actively developed. Entity Framework Core offers new features that won't be implemented in EF6. + +Sources: +- https://learn.microsoft.com/en-us/ef/core/miscellaneous/platforms +- https://learn.microsoft.com/en-us/ef/efcore-and-ef6/ + +### Existence of current SK connectors + +Taking into account that Semantic Kernel already has some integration with databases, which are also supported Entity Framework, there are multiple options how to proceed: +- Support both Entity Framework and DB connector (e.g. 
`Microsoft.SemanticKernel.Connectors.EntityFramework` and `Microsoft.SemanticKernel.Connectors.MongoDB`) - in this case both connectors should produce exactly the same outcome, so additional work will be required (such as implementing the same set of unit/integration tests) to ensure this state. Also, any modifications to the logic should be applied in both connectors. +- Support just one Entity Framework connector (e.g. `Microsoft.SemanticKernel.Connectors.EntityFramework`) - in this case, existing DB connector should be removed, which may be a breaking change to existing customers. An additional work will be required to ensure that Entity Framework covers exactly the same set of features as previous DB connector. +- Support just one DB connector (e.g. `Microsoft.SemanticKernel.Connectors.MongoDB`) - in this case, if such connector already exists - no additional work is required. If such connector doesn't exist and it's important to add it - additional work is required to implement that DB connector. + + +Table with Entity Framework and Semantic Kernel database support (only for databases which support vector search): + +|Database Engine|Maintainer / Vendor|Supported in EF|Supported in SK|Updated to SK memory v2 design +|-|-|-|-|-| +|Azure Cosmos|Microsoft|Yes|Yes|Yes| +|Azure SQL and SQL Server|Microsoft|Yes|Yes|No| +|SQLite|Microsoft|Yes|Yes|No| +|PostgreSQL|Npgsql Development Team|Yes|Yes|No| +|MongoDB|MongoDB|Yes|Yes|No| +|MySQL|Oracle|Yes|No|No| +|Oracle DB|Oracle|Yes|No|No| +|Google Cloud Spanner|Cloud Spanner Ecosystem|Yes|No|No| + +**Note**: +One database engine can have multiple Entity Framework integrations, which can be maintained by different vendors (e.g. there are 2 MySQL EF NuGet packages - one is maintained by Oracle and another one is maintained by Pomelo Foundation Project). 
+ +Vector DB connectors which are additionally supported in Semantic Kernel: +- Azure AI Search +- Chroma +- Milvus +- Pinecone +- Qdrant +- Redis +- Weaviate + +Sources: +- https://learn.microsoft.com/en-us/ef/core/providers/?tabs=dotnet-core-cli#current-providers + +## Considered Options + +- Add new `Microsoft.SemanticKernel.Connectors.EntityFramework` connector. +- Do not add `Microsoft.SemanticKernel.Connectors.EntityFramework` connector, but add a new connector for individual database when needed. + +## Decision Outcome + +Based on the above investigation, the decision is not to add Entity Framework connector, but to add a new connector for individual database when needed. The reason for this decision is that Entity Framework providers do not uniformly support collection management operations and will require database specific code for key handling and object mapping. These factors will make use of an Entity Framework connector unreliable and it will not abstract away the underlying database. Additionally the number of vector databases that Entity Framework supports that Semantic Kernel does not have a memory connector for is very small. diff --git a/docs/decisions/0052-python-ai-connector-new-abstract-methods.md b/docs/decisions/0052-python-ai-connector-new-abstract-methods.md new file mode 100644 index 000000000000..6ee9d01a5dfa --- /dev/null +++ b/docs/decisions/0052-python-ai-connector-new-abstract-methods.md @@ -0,0 +1,80 @@ +--- +# These are optional elements. Feel free to remove any of them. 
+status: accepted
+contact: Tao Chen
+date: 2024-09-03
+deciders: Eduard van Valkenburg, Ben Thomas
+consulted: Eduard van Valkenburg
+informed: Eduard van Valkenburg, Ben Thomas
+---
+
+# New abstract methods in `ChatCompletionClientBase` and `TextCompletionClientBase` (Semantic Kernel Python)
+
+## Context and Problem Statement
+
+The ChatCompletionClientBase class currently contains two abstract methods, namely `get_chat_message_contents` and `get_streaming_chat_message_contents`. These methods offer standardized interfaces for clients to engage with various models.
+
+> We will focus on `ChatCompletionClientBase` in this ADR but `TextCompletionClientBase` will have a similar structure.
+
+With the introduction of function calling to many models, Semantic Kernel has implemented an amazing feature known as `auto function invocation`. This feature relieves developers from the burden of manually invoking the functions requested by the models, making the development process much smoother.
+
+Auto function invocation can cause a side effect where a single call to get_chat_message_contents or get_streaming_chat_message_contents may result in multiple calls to the model. However, this presents an excellent opportunity for us to introduce another layer of abstraction that is solely responsible for making a single call to the model.
+
+## Benefits
+
+- To simplify the implementation, we can include a default implementation of `get_chat_message_contents` and `get_streaming_chat_message_contents`.
+- We can introduce common interfaces for tracing individual model calls, which can improve the overall monitoring and management of the system.
+- By introducing this layer of abstraction, it becomes more efficient to add new AI connectors to the system. 
+ +## Details + +### Two new abstract methods + +> Revision: In order to not break existing customers who have implemented their own AI connectors, these two methods are not decorated with the `@abstractmethod` decorator, but instead throw an exception if they are not implemented in the built-in AI connectors. + +```python +async def _inner_get_chat_message_content( + self, + chat_history: ChatHistory, + settings: PromptExecutionSettings +) -> list[ChatMessageContent]: + raise NotImplementedError +``` + +```python +async def _inner_get_streaming_chat_message_content( + self, + chat_history: ChatHistory, + settings: PromptExecutionSettings +) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: + raise NotImplementedError +``` + +### A new `ClassVar[bool]` variable in `ChatCompletionClientBase` to indicate whether a connector supports function calling + +This class variable will be overridden in derived classes and be used in the default implementations of `get_chat_message_contents` and `get_streaming_chat_message_contents`. + +```python +class ChatCompletionClientBase(AIServiceClientBase, ABC): + """Base class for chat completion AI services.""" + + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = False + ... +``` + +```python +class MockChatCompletionThatSupportsFunctionCalling(ChatCompletionClientBase): + + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + + @override + async def get_chat_message_contents( + self, + chat_history: ChatHistory, + settings: "PromptExecutionSettings", + **kwargs: Any, + ) -> list[ChatMessageContent]: + if not self.SUPPORTS_FUNCTION_CALLING: + return ... + ... +``` diff --git a/docs/decisions/0053-dotnet-structured-outputs.md b/docs/decisions/0053-dotnet-structured-outputs.md new file mode 100644 index 000000000000..1b028ff58796 --- /dev/null +++ b/docs/decisions/0053-dotnet-structured-outputs.md @@ -0,0 +1,299 @@ +--- +# These are optional elements. Feel free to remove any of them. 
+status: proposed +contact: dmytrostruk +date: 2024-09-10 +deciders: sergeymenshykh, markwallace, rbarreto, westey-m, dmytrostruk, ben.thomas, evan.mattson, crickman +--- + +# Structured Outputs implementation in .NET version of Semantic Kernel + +## Context and Problem Statement + +[Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) is a feature in OpenAI API that ensures the model will always generate responses based on provided JSON Schema. This gives more control over model responses, allows to avoid model hallucinations and write simpler prompts without a need to be specific about response format. This ADR describes several options how to enable this functionality in .NET version of Semantic Kernel. + +A couple of examples how it's implemented in .NET and Python OpenAI SDKs: + +.NET OpenAI SDK: +```csharp +ChatCompletionOptions options = new() +{ + ResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat( + name: "math_reasoning", + jsonSchema: BinaryData.FromString(""" + { + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "explanation": { "type": "string" }, + "output": { "type": "string" } + }, + "required": ["explanation", "output"], + "additionalProperties": false + } + }, + "final_answer": { "type": "string" } + }, + "required": ["steps", "final_answer"], + "additionalProperties": false + } + """), + strictSchemaEnabled: true) +}; + +ChatCompletion chatCompletion = await client.CompleteChatAsync( + ["How can I solve 8x + 7 = -23?"], + options); + +using JsonDocument structuredJson = JsonDocument.Parse(chatCompletion.ToString()); + +Console.WriteLine($"Final answer: {structuredJson.RootElement.GetProperty("final_answer").GetString()}"); +Console.WriteLine("Reasoning steps:"); +``` + +Python OpenAI SDK: + +```python +class CalendarEvent(BaseModel): + name: str + date: str + participants: list[str] + +completion = client.beta.chat.completions.parse( + 
model="gpt-4o-2024-08-06", + messages=[ + {"role": "system", "content": "Extract the event information."}, + {"role": "user", "content": "Alice and Bob are going to a science fair on Friday."}, + ], + response_format=CalendarEvent, +) + +event = completion.choices[0].message.parsed +``` + +## Considered Options + +**Note**: All of the options presented in this ADR are not mutually exclusive - they can be implemented and supported simultaneously. + +### Option #1: Use OpenAI.Chat.ChatResponseFormat object for ResponseFormat property (similar to .NET OpenAI SDK) + +This approach means that `OpenAI.Chat.ChatResponseFormat` object with JSON Schema will be constructed by user and provided to `OpenAIPromptExecutionSettings.ResponseFormat` property, and Semantic Kernel will pass it to .NET OpenAI SDK as it is. + +Usage example: + +```csharp +// Initialize Kernel +Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + +// Create JSON Schema with desired response type from string. +ChatResponseFormat chatResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat( + name: "math_reasoning", + jsonSchema: BinaryData.FromString(""" + { + "type": "object", + "properties": { + "Steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Explanation": { "type": "string" }, + "Output": { "type": "string" } + }, + "required": ["Explanation", "Output"], + "additionalProperties": false + } + }, + "FinalAnswer": { "type": "string" } + }, + "required": ["Steps", "FinalAnswer"], + "additionalProperties": false + } + """), + strictSchemaEnabled: true); + +// Pass ChatResponseFormat in OpenAIPromptExecutionSettings.ResponseFormat property. +var executionSettings = new OpenAIPromptExecutionSettings +{ + ResponseFormat = chatResponseFormat +}; + +// Get string result. 
+var result = await kernel.InvokePromptAsync("How can I solve 8x + 7 = -23?", new(executionSettings)); + +Console.WriteLine(result.ToString()); + +// Output: + +// { +// "Steps":[ +// { +// "Explanation":"Start with the equation: (8x + 7 = -23). The goal is to isolate (x) on one side of the equation. To begin, we need to remove the constant term from the left side of the equation.", +// "Output":"8x + 7 = -23" +// }, +// { +// "Explanation":"Subtract 7 from both sides of the equation to eliminate the constant from the left side.", +// "Output":"8x + 7 - 7 = -23 - 7" +// }, +// { +// "Explanation":"Simplify both sides: The +7 and -7 on the left will cancel out, while on the right side, -23 - 7 equals -30.", +// "Output":"8x = -30" +// }, +// { +// "Explanation":"Now, solve for (x) by dividing both sides of the equation by 8. This will isolate (x).", +// "Output":"8x / 8 = -30 / 8" +// }, +// { +// "Explanation":"Simplify the right side of the equation by performing the division: -30 divided by 8 equals -3.75.", +// "Output":"x = -3.75" +// } +// ], +// "FinalAnswer":"x = -3.75" +// } +``` + +Pros: +- This approach is already supported in Semantic Kernel without any additional changes, since there is a logic to pass `ChatResponseFormat` object as it is to .NET OpenAI SDK. +- Consistent with .NET OpenAI SDK. + +Cons: +- No type-safety. Information about response type should be manually constructed by user to perform a request. To access each response property, the response should be handled manually as well. It's possible to define a C# type and use JSON deserialization for response, but JSON Schema for request will still be defined separately, which means that information about the type will be stored in 2 places and any modifications to the type should be handled in 2 places. +- Inconsistent with Python version, where response type is defined in a class and passed to `response_format` property by simple assignment. 
+ +### Option #2: Use C# type for ResponseFormat property (similar to Python OpenAI SDK) + +This approach means that `OpenAI.Chat.ChatResponseFormat` object with JSON Schema will be constructed by Semantic Kernel, and user just needs to define C# type and assign it to `OpenAIPromptExecutionSettings.ResponseFormat` property. + +Usage example: + +```csharp +// Define desired response models +private sealed class MathReasoning +{ + public List Steps { get; set; } + + public string FinalAnswer { get; set; } +} + +private sealed class MathReasoningStep +{ + public string Explanation { get; set; } + + public string Output { get; set; } +} + +// Initialize Kernel +Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + +// Pass desired response type in OpenAIPromptExecutionSettings.ResponseFormat property. +var executionSettings = new OpenAIPromptExecutionSettings +{ + ResponseFormat = typeof(MathReasoning) +}; + +// Get string result. +var result = await kernel.InvokePromptAsync("How can I solve 8x + 7 = -23?", new(executionSettings)); + +// Deserialize string to desired response type. +var mathReasoning = JsonSerializer.Deserialize(result.ToString())!; + +OutputResult(mathReasoning); + +// Output: + +// Step #1 +// Explanation: Start with the given equation. +// Output: 8x + 7 = -23 + +// Step #2 +// Explanation: To isolate the term containing x, subtract 7 from both sides of the equation. +// Output: 8x + 7 - 7 = -23 - 7 + +// Step #3 +// Explanation: To solve for x, divide both sides of the equation by 8, which is the coefficient of x. +// Output: (8x)/8 = (-30)/8 + +// Step #4 +// Explanation: This simplifies to x = -3.75, as dividing -30 by 8 gives -3.75. +// Output: x = -3.75 + +// Final answer: x = -3.75 +``` + +Pros: +- Type safety. 
Users won't need to define JSON Schema manually as it will be handled by Semantic Kernel, so users could focus on defining C# types only. Properties on C# type can be added or removed to change the format of desired response. `Description` attribute is supported to provide more detailed information about specific property. +- Consistent with Python OpenAI SDK. +- Minimal code changes are required since Semantic Kernel codebase already has a logic to build a JSON Schema from C# type. + +Cons: +- Desired type should be provided via `ResponseFormat = typeof(MathReasoning)` or `ResponseFormat = object.GetType()` assignment, which can be improved by using C# generics. +- Response coming from Kernel is still a `string`, so it should be deserialized to desired type manually by user. + +### Option #3: Use C# generics + +This approach is similar to Option #2, but instead of providing type information via `ResponseFormat = typeof(MathReasoning)` or `ResponseFormat = object.GetType()` assignment, it will be possible to use C# generics. + +Usage example: + +```csharp +// Define desired response models +private sealed class MathReasoning +{ + public List Steps { get; set; } + + public string FinalAnswer { get; set; } +} + +private sealed class MathReasoningStep +{ + public string Explanation { get; set; } + + public string Output { get; set; } +} + +// Initialize Kernel +Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + +// Get MathReasoning result. +var result = await kernel.InvokePromptAsync("How can I solve 8x + 7 = -23?"); + +OutputResult(mathReasoning); +``` + +Pros: +- Simple usage, no need in defining `PromptExecutionSettings` and deserializing string response later. + +Cons: +- Implementation complexity compared to Option #1 and Option #2: + 1. 
Chat completion service returns a string, so deserialization logic should be added somewhere to return a type instead of string. Potential place: `FunctionResult`, as it already contains `GetValue` generic method, but it doesn't contain deserialization logic, so it should be added and tested. + 2. `IChatCompletionService` and its methods are not generic, but information about the response type should still be passed to OpenAI connector. One way would be to add generic version of `IChatCompletionService`, which may introduce a lot of additional code changes. Another way is to pass type information through `PromptExecutionSettings` object. Taking into account that `IChatCompletionService` uses `PromptExecutionSettings` and not `OpenAIPromptExecutionSettings`, `ResponseFormat` property should be moved to the base execution settings class, so it's possible to pass the information about response format without coupling to specific connector. On the other hand, it's not clear if `ResponseFormat` parameter will be useful for other AI connectors. + 3. Streaming scenario won't be supported, because for deserialization all the response content should be aggregated first. If Semantic Kernel will do the aggregation, then streaming capability will be lost. + +## Out of scope + +Function Calling functionality is out of scope of this ADR, since Structured Outputs feature is already partially used in current function calling implementation by providing JSON schema with information about function and its arguments. The only remaining parameter to add to this process is `strict` property which should be set to `true` to enable Structured Outputs in function calling. This parameter can be exposed through `PromptExecutionSettings` type. + +By setting `strict` property to `true` for function calling process, the model should not create additional non-existent parameters or functions, which could resolve hallucination problems. 
On the other hand, enabling Structured Outputs for function calling will introduce additional latency during first request since the schema is processed first, so it may impact the performance, which means that this property should be well-documented. + +More information here: [Function calling with Structured Outputs](https://platform.openai.com/docs/guides/function-calling/function-calling-with-structured-outputs). + +## Decision Outcome + +1. Support Option #1 and Option #2, create a task for Option #3 to handle it separately. +2. Create a task for Structured Outputs in Function Calling and handle it separately. diff --git a/docs/decisions/0054-processes.md b/docs/decisions/0054-processes.md new file mode 100644 index 000000000000..040046acb84f --- /dev/null +++ b/docs/decisions/0054-processes.md @@ -0,0 +1,318 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: accepted +contact: bentho +date: September 20, 2024 +deciders: bentho, markwallace, estenori, crickman, eavanvalkenburg, evchaki +consulted: bentho, markwallace, estenori, crickman, eavanvalkenburg, evchaki, mabolan +informed: SK-3P-FTE +--- + +# Business Process Execution with Semantic Kernel + +## Context and Problem Statement + +We have heard from many customers about the need for an enterprise grade solution for automating AI-integrated business processes. +At a high level, the structure of a business process is: + +- Starts with external event +- Contains a collection of structured activities or tasks +- A defined sequence of these tasks that produces a service or product that adds value +- Serves a business goal + +In technical terms, a process is something that can be represented as a graph where nodes in the graph represent units of work and edges between nodes represent causal activations that may or may not also carry data. There are many examples of graph based workflow engines that are suitable for handling traditional enterprise processes. 
Examples include GitHub Actions & Workflows, Argo Workflows, Dapr Workflows, and many more. However, the additional requirements for integration with AI add new requirements that may not be adequately supported by these frameworks. Features such as support for cycles in the graph, dynamically created nodes and edges, node and edge level metadata to support AI driven scenarios, and streamlined integration with AI orchestration are examples of things that are not fully supported by any of these.
+
+## Decision Drivers
+
+- Customers should be able to leverage their existing investments in all supported languages of Semantic Kernel.
+- Customers should be able to leverage their existing investments in infrastructure.
+- Customers should be able to collaborate with their business process peers to build up composable processes.
+- Customers should be able to use AI to enhance and streamline the steps within their business processes.
+- Customers should be able to control the process flow in a defined and repeatable way.
+- Customers should be able to easily model typical AI driven scenarios that may require cycles and dynamic edges.
+- Processes should be able to support short lived transient business processes as well as long lived business processes.
+- Processes should be able to be run locally, deployed as a single process or deployed to a distributed service.
+- Processes should be able to run and debug locally without additional software or infrastructure.
+- Processes should be stateful and able to resume from a paused state or a recoverable error.
+- Regulated Customers should be able to audit currently running or completed processes end to end.
+
+## Considered Options
+
+### Options #1:
+
+**_Build existing samples on top of existing workflow frameworks_**:
+This option was explored with frameworks such as Dapr Workflows, Argo, Durable Tasks, and others. 
Among the subset of these options that can support the technical requirements listed above, the main concern is the amount of overhead required to work with them. Many of these frameworks require a lot of code and infrastructure to get up and running and require special emulators to run locally, which is undesirable. It's important to call out that this option is not mutually exclusive with the others; we may choose to build samples showing SK integrating with other workflow engines even if we choose to also go a different route.
+
+### Options #2:
+
+**_Build SK Process library within an existing workflow framework_**:
+Of all the frameworks explored, the few that seem closest to meeting the technical requirements listed above are based on [Durable Tasks](https://github.com/Azure/durabletask). This includes things like Dapr Workflows, Azure Durable Functions, or the Durable Tasks Framework itself. Attempts to build a working solution on these frameworks resulted in an awkward interface for basic scenarios due to the underlying structure of Durable Tasks where nodes are stateless and only the central orchestrator is stateful. While it is likely that many AI driven workflows could be modeled in this type of system, our exploration did not produce something we were happy with from a usability perspective.
+
+### Options #3:
+
+**_Build SK Process library with a custom-built workflow engine_**:
+Building a custom workflow engine might provide the cleanest integration but would require extensive resources and time that we don't have. Distributed workflow engines are products in and of themselves.
+
+### Options #4:
+
+**_Build platform agnostic SK Process library with connectors for existing workflow frameworks_**:
+This is the chosen option.
+
+## Decision Outcome
+
+**_Chosen option - #4_**: Build platform agnostic SK Process library with connectors for existing workflow frameworks. 
+
+This was the only option that was able to meet all the technical and scenario driven requirements. This option should allow for a simple and well-integrated interface into Semantic Kernel as well as the ability to support many existing distributed runtimes that will give our customers the flexibility to use their existing infrastructure and expertise.
+
+### Components of the Process library
+
+The proposed architecture of a Process is based on a graph execution model where nodes, which we call Steps, perform work by invoking user defined Kernel Functions. Edges in the graph are defined from an event driven perspective and carry metadata about the event as well as a data payload containing the output of the Kernel Function invocation.
+
+Starting from the ground up, the components of a process are:
+
+1. **_KernelFunctions_**: The same KernelFunctions that our customers already know and use. Nothing new here.
+1. **_Steps_**: Steps group one or more KernelFunctions together into an object with optional user defined state. A step represents one unit of work within a process. Steps make the output of their work visible to other steps in the process by emitting events. This event based structure allows steps to be created without needing to know which process they are used in, allowing them to be reusable across multiple processes.
+1. **_Process_**: A process groups multiple Steps together and defines the way that outputs flow from step to step. The process provides methods that allow the developer to define the routing of events that are emitted by steps by specifying the steps and associated KernelFunctions that should receive the event.
+
+![Basic Process diagram](./diagrams/process/process_diagram_basic.png)
+
+Let's look at the code required to create a simple process. 
+ +#### Step1 - Define the Steps: + +Steps are required to inherit from the abstract `KernelStepBase` type which allows for optional implementation of activation and deactivation lifecycle methods. + +```csharp +// Define UserInputStep with no state +public class UserInputStep : KernelStepBase +{ + public override ValueTask ActivateAsync() + { + return ValueTask.CompletedTask; + } + + [KernelFunction()] + public string GetUserInput(string userMessage) + { + return $"User: {userMessage}"; + } +} + +``` + +The `UserInputStep` shown above is the minimum implementation of a step with one KernelFunction and no state management. The code in this step does not explicitly emit any events, however, execution of the `PrintUserMessage` will automatically emit an event indicating either the success of the execution with an associated result, or the failure of the execution with an associated error. + +Let's create a second step to take the user input and get a response from an LLM. This step will be stateful so that it can maintain an instance of `ChatHistory`. First define the class to use for tracking state: + +```csharp +public class ChatBotState +{ + public ChatHistory ChatMessages { get; set; } = new(); +} + +``` + +Next define the step: + +```csharp +// Define ChatBotResponseStep with state of type ChatBotState +public class ChatBotResponseStep : KernelStepBase +{ + private readonly Kernel _kernel; + internal ChatBotState? 
_state; + + public ChatBotResponseStep(Kernel kernel) + { + _kernel = kernel; + } + + public override ValueTask ActivateAsync(ChatBotState state) + { + _state = state; + _state.ChatMessages ??= new(); + return ValueTask.CompletedTask; + } + + [KernelFunction()] + public async Task GetChatResponse(KernelStepContext context, string userMessage) + { + _state!.ChatMessages.Add(new(AuthorRole.User, userMessage)); + IChatCompletionService chatService = _kernel.Services.GetRequiredService(); + ChatMessageContent response = await chatService.GetChatMessageContentAsync(_state.ChatMessages); + if (response != null) + { + _state.ChatMessages.Add(response!); + } + + // emit event: assistantResponse + context.PostEvent(new CloudEvent { Id = ChatBotEvents.AssistantResponseGenerated, Data = response }); + } +} + +``` + +The `ChatBotResponseStep` is a bit more realistic than `UserInputStep` and show the following features: + +**_State management_**: The first thing to notice is that the state object is automatically created by the Process and injected into the `ActivateAsync` method. The Process will automatically persist the state object immediately after successful execution of any of the step's KernelFunctions. Processes use JSON serialization to persist and rehydrate state objects so we require that these types have a default constructor and only contain objects that are JSON serializable. + +**_Step Context_**: The `GetChatResponse` KernelFunction has an argument of type `KernelStepContext` which is automatically provided by the Process. This object provides functionality that allow the step to explicitly emit events such as `ChatBotEvents.AssistantResponseGenerated` in this case. The step context can also provide functionality for advances scenarios such as utilizing durable timers and dynamically adding new steps to the process. + +**_Cloud Events_**: Events in Steps and Processes make use of [Cloud Events](https://github.com/cloudevents/spec). 
Cloud Events provide an open source and industry standard specification for describing event data in common formats to provide interoperability across services, platforms and systems. This will allow Processes to emit/receive events to/from external systems without requiring custom connectors or mapping middleware.
+
+#### Step 2 - Define the Process:
+
+Now that we have our steps defined, we can move on to defining our process. The first thing to do is to add the steps to the process...
+
+```csharp
+
+KernelProcess process = new("ChatBot");
+
+var userInputStep = process.AddStepFromType<UserInputStep>(isEntryPoint: true);
+var responseStep = process.AddStepFromType<ChatBotResponseStep>();
+
+```
+
+The two steps created above have been added to our new `ChatBot` process and the `UserInputStep` has been declared as the entry point. This means that any events received by the process will be forwarded to this step. Now we need to define the flow of our process by describing which actions are triggered by events from our steps.
+
+```csharp
+
+// When the userInput step completes, send the output to the llm response step
+userInputStep
+    .OnFunctionResult(nameof(UserInputStep.GetUserInput))
+    .SendOutputTo(responseStep, nameof(ChatBotResponseStep.GetChatResponse), "userMessage");
+
+```
+
+In the code above, `userInputStep.OnFunctionResult(nameof(UserInputStep.GetUserInput))` selects the event that is emitted by the process on successful execution of the `GetUserInput` KernelFunction in the step instance referenced by `userInputStep`. It then returns a builder type object that provides actions based on the context. In this case the `SendOutputTo(responseStep, nameof(ChatBotResponseStep.GetChatResponse), "userMessage")` action is used to forward the event data to the `userMessage` parameter of the `GetChatResponse` KernelFunction on the step instance referenced by `responseStep`. 
+
+One of the key takeaways here is that events emitted by a given step can be selected and forwarded to **_a specific parameter of a specific KernelFunction_** within another step. Event data sent to parameters of KernelFunctions is queued until all of the required parameters of the function have received input, at which point the function will be invoked.
+
+#### Step 3 - Get output from the Process:
+
+Now that we've defined our process, we would like to inspect the final result that it produces. In many cases the result of the process will be written to a database or queue or some other internal system and that's all that's needed. In some cases however, such as in the case of a process running in a server as the result of a synchronous REST call, there is a need to extract the result from the finished process so that it can be returned to the caller. In these cases handler functions can be registered on the process to be triggered by a specific event.
+
+Let's wire up the process above to run a handler function when the `ChatBotResponseStep` step completes.
+
+```csharp
+
+process.OnEvent(ChatBotEvents.AssistantResponseGenerated).Run((CloudEvent e) =>
+{
+    result = (int)e.Data!;
+    Console.WriteLine($"Result: {result}");
+});
+
+```
+
+A key thing to notice is that the event emitted by the `ChatBotResponseStep` within the process was also emitted from the process itself, which allows us to register a handler for it. All events within a process will bubble up out of the process to the parent, which may be the program running the process or may be another process. This pattern allows for nested processes where an existing process can be used as a step in another process.
+
+#### Step 4 - Process object model:
+
+The instance of `KernelProcess` that we've created is nothing more than an object model that describes the underlying graph. It contains a collection of steps that in turn contain a collection of edges. 
This object model is designed to be serializable in human readable formats such as Json/Yaml as allows the process definition to be decoupled from the system in which the process runs. + +```json +{ + "EntryPointId": "efbfc9ca0c1942a384d21402c9078784", + "Id": "19f669adfa5b40688e818e400cb9750c", + "Name": "NestedChatBot", + "StepType": "SemanticKernel.Processes.Core.KernelProcess, SemanticKernel.Processes.Core, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "StateType": "SemanticKernel.Processes.Core.DefaultState, SemanticKernel.Processes.Core, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "OutputEdges": {}, + "StepProxies": [ + { + "Id": "6fa2d6b513464eb5a4daa9b5ebc1a956", + "Name": "UserInputStep", + "StepType": "SkProcess.Orleans.Silo.UserInputStep, SkProcess.Orleans.Silo, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "StateType": "SkProcess.Orleans.Silo.UserInputState, SkProcess.Orleans.Silo, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "OutputEdges": { + "UserInputStep_6fa2d6b513464eb5a4dxa9b5ebc1a956.exit": [ + { + "SourceId": "6fa2d6b513464eb5a4dxa9b5ebc1a956", + "OutputTargets": [ + { + "StepId": "End", + "FunctionName": "", + "ParameterName": "" + } + ] + } + ], + "UserInputStep_6fa2d6b513464eb5a4dxa9b5ebc1a956.userInputReceived": [ + { + "SourceId": "6fa2d6b513464eb5a4daa9b5ebc1a956", + "OutputTargets": [ + { + "StepId": "5035d41383314343b99ebf6e1a1a1f99", + "FunctionName": "GetChatResponse", + "ParameterName": "userMessage" + } + ] + } + ] + } + }, + { + "Id": "5035d41383314343b99ebf6e1a1a1f99", + "Name": "AiResponse", + "StepType": "SemanticKernel.Processes.Core.KernelProcess, SemanticKernel.Processes.Core, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "StateType": "SemanticKernel.Processes.Core.DefaultState, SemanticKernel.Processes.Core, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", + "OutputEdges": { + "AiResponse_5035d41383314343b99ebf6e1a1a1f99.TransformUserInput.OnResult": [ + { 
+ "SourceId": "5035d41383314343b99ebf6e1a1a1f99", + "OutputTargets": [ + { + "StepId": "6fa2d6b513464eb5a4daa9b5ebc1a956", + "FunctionName": "GetUserInput", + "ParameterName": "" + } + ] + } + ] + } + } + ] +} +``` + +#### Step 5 - Run the Process: + +Running a Process requires using a "connector" to a supported runtime. As part of the core packages we will include an in-process runtime that is capable of of running a process locally on a dev machine or in a server. This runtime will initially use memory or file based persistence and will allow for easy development and debugging. + +Additionally we will provide support for [Orleans](https://learn.microsoft.com/en-us/dotnet/orleans/overview) and [Dapr Actor](https://docs.dapr.io/developing-applications/building-blocks/actors/actors-overview/) based runtimes which will allow customers to easily deploy processes as a distributed and highly scalable cloud based system. + +### Packages + +The following packages will be created for Processes: + +- **_Microsoft.SemanticKernel.Process.Abstractions_** + + Contains common interfaces and DTOs used by all other packages. + +- **_Microsoft.SemanticKernel.Process.Core_** + + Contains core functionality for defining Steps and Processes. + +- **_Microsoft.SemanticKernel.Process.Server_** + + Contains the in-process runtime. + +- **_Microsoft.SemanticKernel.Process_** + + Contains Microsoft.SemanticKernel.Process.Abstractions, Microsoft.SemanticKernel.Process.Core, and Microsoft.SemanticKernel.Process.Server + +- **_Microsoft.SemanticKernel.Process.Orleans_** + + Contains the Orleans based runtime. + +- **_Microsoft.SemanticKernel.Process.Dapr_** + + Contains the Dapr based runtime. + +## More Information + +### Process runtime architecture: + +In validation of the proposed solution, two runtimes were created, one for the local/server scenario and one for the distributed actor scenario using Orleans. 
Both of these implementation were based on the [Pregel Algorithm](https://kowshik.github.io/JPregel/pregel_paper.pdf) for large-scale graph processing. This algorithm is well tested and well suited for single machine scenarios as well as distributed systems. More information on how the Pregel algorithm works can be found in the following links. + +- [Pregel - The Morning Paper](https://blog.acolyer.org/2015/05/26/pregel-a-system-for-large-scale-graph-processing/) +- [Pregel - Distributed Algorithms and Optimization](https://web.stanford.edu/~rezab/classes/cme323/S15/notes/lec8.pdf) diff --git a/docs/decisions/0054-python-streaming-content-for-token-usage.md b/docs/decisions/0054-python-streaming-content-for-token-usage.md new file mode 100644 index 000000000000..d0e8474e479b --- /dev/null +++ b/docs/decisions/0054-python-streaming-content-for-token-usage.md @@ -0,0 +1,170 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: { accepted } +contact: { Tao Chen } +date: { 2024-09-18 } +deciders: { Tao Chen } +consulted: { Eduard van Valkenburg, Evan Mattson } +informed: { Eduard van Valkenburg, Evan Mattson, Ben Thomas } +--- + +# Streaming Contents for Token Usage Information (Semantic Kernel Python) + +## Context and Problem Statement + +Currently, `StreamingChatMessageContent` (inherits from `StreamingContentMixin`) in Semantic Kernel requires a choice index to be specified. This creates a limitation since the token usage information from **OpenAI's streaming chat completion** API will be returned in the last chunk where the choices field will be empty, which leads to an unknown choice index for the chunk. For more information, please refer to the [OpenAI API documentation](https://platform.openai.com/docs/api-reference/chat/create) and look for the `stream_options` field. + +> The token usage information returned in the last chunk is the **total** token usage for the chat completion request regardless of the number of choices specified. 
That being said, there will be only one chunk containing the token usage information in the streaming response even when multiple choices are requested. + +Our current data structure for `StreamingChatMessageContent`: + +```Python +# semantic_kernel/content/streaming_chat_message_content.py +class StreamingChatMessageContent(ChatMessageContent, StreamingContentMixin): + +# semantic_kernel/content/chat_message_content.py +class ChatMessageContent(KernelContent): + content_type: Literal[ContentTypes.CHAT_MESSAGE_CONTENT] = Field(CHAT_MESSAGE_CONTENT_TAG, init=False) # type: ignore + tag: ClassVar[str] = CHAT_MESSAGE_CONTENT_TAG + role: AuthorRole + name: str | None = None + items: list[Annotated[ITEM_TYPES, Field(..., discriminator=DISCRIMINATOR_FIELD)]] = Field(default_factory=list) + encoding: str | None = None + finish_reason: FinishReason | None = None + +# semantic_kernel/content/streaming_content_mixin.py +class StreamingContentMixin(KernelBaseModel, ABC): + choice_index: int + +# semantic_kernel/content/kernel_content.py +class KernelContent(KernelBaseModel, ABC): + inner_content: Any | None = None + ai_model_id: str | None = None + metadata: dict[str, Any] = Field(default_factory=dict) +``` + +## Proposal 1 + +In non-streaming responses, the token usage is returned as part of the response from the model along with the choices that can be more than one. We then parse the choices into individual `ChatMessageContent`s, with each containing the token usage information, even though the token usage is for the entire response, not just the individual choice. + +Considering the same strategy, all choices from the streaming response should contain the token usage information when they are eventually concatenated by their `choice_index`. Since we know the number of choices requested, we can perform the following steps: + +1. 
Replicate the last chunk for each choice requested to create a list of `StreamingChatMessageContent`s, with the token usage information included in the metadata. +2. Assign a choice index to each replicated chunk, starting from 0. +3. Stream the replicated chunks in a list back to the client. + +### Additional considerations + +Currently, when two `StreamingChatMessageContent`s are "added" together, the metadata is not merged. We need to ensure that the metadata is merged when the chunks are concatenated. When there are conflicting metadata keys, the metadata from the second chunk should overwrite the metadata from the first chunk: + +```Python +class StreamingChatMessageContent(ChatMessageContent, StreamingContentMixin): + ... + + def __add__(self, other: "StreamingChatMessageContent") -> "StreamingChatMessageContent": + ... + + return StreamingChatMessageContent( + ..., + metadata=self.metadata | other.metadata, + ... + ) + + ... +``` + +### Risks + +There are no breaking changes and known risks associated with this proposal. + +## Proposal 2 + +We allow the choice index to be optional in the `StreamingContentMixin` class. This will allow the choice index to be `None` when the token usage information is returned in the last chunk. The choice index will be set to `None` in the last chunk, and the client can handle the token usage information accordingly. + +```Python +# semantic_kernel/content/streaming_content_mixin.py +class StreamingContentMixin(KernelBaseModel, ABC): + choice_index: int | None +``` + +This is a simpler solution compared to Proposal 1, and it is more in line with what the OpenAI API returns, that is the token usage is not associated with any specific choice. + +### Risks + +This is potentially a breaking change since the `choice_index` field is currently required. This approach also makes streaming content concatenation more complex since the choice index will need to be handled differently when it is `None`. 
+ +## Proposal 3 + +We will merge `ChatMessageContent` and `StreamingChatMessageContent` into a single class, `ChatMessageContent`, and mark `StreamingChatMessageContent` as deprecated. The `StreamingChatMessageContent` class will be removed in a future release. Then we apply the either [Proposal 1](#proposal-1) or [Proposal 2](#proposal-2) to the `ChatMessageContent` class to handle the token usage information. + +This approach simplifies the codebase by removing the need for a separate class for streaming chat messages. The `ChatMessageContent` class will be able to handle both streaming and non-streaming chat messages. + +```Python +# semantic_kernel/content/streaming_chat_message_content.py +@deprecated("StreamingChatMessageContent is deprecated. Use ChatMessageContent instead.") +class StreamingChatMessageContent(ChatMessageContent): + pass + +# semantic_kernel/content/chat_message_content.py +class ChatMessageContent(KernelContent): + ... + # Add the choice_index field to the ChatMessageContent class and make it optional + choice_index: int | None + + # Add the __add__ method to merge the metadata when two ChatMessageContent instances are added together. This is currently an abstract method in the `StreamingContentMixin` class. + def __add__(self, other: "ChatMessageContent") -> "ChatMessageContent": + ... + + return ChatMessageContent( + ..., + choice_index=self.choice_index, + ... + ) + + # Add the __bytes__ method to return the bytes representation of the ChatMessageContent instance. This is currently an abstract method in the `StreamingContentMixin` class. + def __bytes__(self) -> bytes: + ... +``` + +### Risks + +We are unifying the returned data structure for streaming and non-streaming chat messages, which may lead to confusion for developers initially, especially if they are not aware of the deprecation of the `StreamingChatMessageContent` class, or they came from SK .Net. 
It may also create a sharper learning curve if developers started with Python but later move to .Net for production. This approach also introduces breaking changes to our AI connectors as the returned data type will be different.
+
+> We will also need to update the `StreamingTextContent` and `TextContent` in a similar way for this proposal.
+
+## Proposal 4
+
+Similar to [Proposal 3](#proposal-3), we will merge `ChatMessageContent` and `StreamingChatMessageContent` into a single class, `ChatMessageContent`, and mark `StreamingChatMessageContent` as deprecated. In addition, we will introduce a new mixin called `ChatMessageContentConcatenationMixin` to handle the concatenation of two `ChatMessageContent` instances. Then we apply either [Proposal 1](#proposal-1) or [Proposal 2](#proposal-2) to the `ChatMessageContent` class to handle the token usage information.
+
+```Python
+# semantic_kernel/content/streaming_chat_message_content.py
+@deprecated("StreamingChatMessageContent is deprecated. Use ChatMessageContent instead.")
+class StreamingChatMessageContent(ChatMessageContent):
+    pass
+
+# semantic_kernel/content/chat_message_content.py
+class ChatMessageContent(KernelContent, ChatMessageContentConcatenationMixin):
+    ...
+    # Add the choice_index field to the ChatMessageContent class and make it optional
+    choice_index: int | None
+
+    # Add the __bytes__ method to return the bytes representation of the ChatMessageContent instance. This is currently an abstract method in the `StreamingContentMixin` class.
+    def __bytes__(self) -> bytes:
+        ...
+
+class ChatMessageContentConcatenationMixin(KernelBaseModel, ABC):
+    def __add__(self, other: "ChatMessageContent") -> "ChatMessageContent":
+        ...
+```
+
+This approach separates the concerns of the `ChatMessageContent` class and the concatenation logic into two separate classes. This can help to keep the codebase clean and maintainable.
+
+### Risks
+
+Same as [Proposal 3](#proposal-3). 
+ +## Decision Outcome + +To minimize the impact on customers and the existing codebase, we will go with [Proposal 1](#proposal-1) to handle the token usage information in the OpenAI streaming responses. This proposal is backward compatible and aligns with the current data structure for non-streaming responses. We will also ensure that the metadata is merged correctly when two `StreamingChatMessageContent` instances are concatenated. This approach also makes sure the token usage information will be associated to all choices in the streaming response. + +[Proposal 3](#proposal-3) and [Proposal 4](#proposal-4) are still valid but perhaps premature at this stage as most services still return objects of different types for streaming and non-streaming responses. We will keep them in mind for future refactoring efforts. diff --git a/docs/decisions/0055-dotnet-azureopenai-stable-version-strategy.md b/docs/decisions/0055-dotnet-azureopenai-stable-version-strategy.md new file mode 100644 index 000000000000..c4423169280f --- /dev/null +++ b/docs/decisions/0055-dotnet-azureopenai-stable-version-strategy.md @@ -0,0 +1,226 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: accepted +contact: rogerbarreto +date: 2024-10-03 +deciders: sergeymenshykh, markwallace, rogerbarreto, westey-m, dmytrostruk, evchaki +consulted: crickman +--- + +# Connectors Versioning Strategy for Underlying SDKs + +## Context and Problem Statement + +This week (01-10-2024) OpenAI and Azure OpenAI released their first stable version and we need to bring some options ahead of us regarding how to move forward with the versioning strategy for the next releases of `OpenAI` and `AzureOpenAI` connectors which will also set the path moving forward with other connectors and providers versioning strategies. + +This ADR brings different options how we can move forward thinking on the impact on the users and also how to keep a clear message on our strategy. 
+
+Contrary to what we were expecting, the Azure OpenAI GA package chose to remove many of the features previously available in preview packages from its first GA version.
+
+This also requires us to rethink how we are going to proceed with our strategy for the following versions of our connectors.
+
+| Name                | SDK NameSpace   | Semantic Kernel NameSpace                       |
+| ------------------- | --------------- | ----------------------------------------------- |
+| OpenAI (OAI)        | OpenAI          | Microsoft.SemanticKernel.Connectors.OpenAI      |
+| Azure OpenAI (AOAI) | Azure.AI.OpenAI | Microsoft.SemanticKernel.Connectors.AzureOpenAI |
+
+## Decision Drivers
+
+- Minimize the impact on customers
+- Allow customers to use either GA or Beta versions of OpenAI and Azure.AI.OpenAI packages
+- Keep a clear message on our strategy
+- Keep the compatibility with the previous versions
+- Our package versioning should make it clear which version of OpenAI or Azure.AI.OpenAI packages we depend on
+- Follow the Semantic Kernel versioning strategy in a way that accommodates well with other SDK version strategies.
+
+## Considered Options
+
+1. **Keep As-Is** - Target only preview packages.
+2. **Preview + GA versioning** (Create a new version (GA + pre-release) side by side of the Azure OpenAI and OpenAI Connectors).
+3. **Stop targeting preview packages**, only target GA packages moving forward.
+
+## 1. Keep As-Is - Target only preview packages
+
+This option will keep the current strategy of targeting only preview packages, which will keep the compatibility with the previous versions and new GA targeting versions and pipelines for our customers. This option has the least impact on our users and our pipeline strategy.
+
+Today all customers that are already using the Azure OpenAI Connector have their pipelines configured to use the preview packages. 
+ +```mermaid +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'showBranches': true, 'showCommitLabel':true,'mainBranchName': 'SemanticKernel'}} }%% + gitGraph TB: + checkout SemanticKernel + commit id:"SK 1.21" + branch OpenAI + commit id:"OAI 2.0-beta.12" + branch AzureOpenAI + commit id:"AOAI 2.0-beta.6" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.22" + merge AzureOpenAI id:"SK AOAI 1.22" + checkout OpenAI + commit id:"OAI 2.0 GA" + checkout AzureOpenAI + merge OpenAI id:"AOAI 2.0 GA" + checkout SemanticKernel + commit id:"Skipped GA's" + checkout OpenAI + commit id:"OAI 2.1-beta.1" + checkout AzureOpenAI + commit id:"AOAI 2.1-beta.1" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.23" + merge AzureOpenAI id:"SK AOAI 1.23" +``` + +Pros: + +- No changes in strategy. (Least impact on customers) +- Keep the compatibility with the previous versions and new GA targeting versions and pipelines. +- Compatible with our previous strategy of targeting preview packages. +- Azure and OpenAI SDKs will always be in sync with new GA versions, allowing us to keep the targeting preview with the latest GA patches. + +Cons: + +- There won't be a SK connector version that targets a stable GA package for OpenAI or AzureOpenAI. +- New customers that understand and target GA only available features and also have a strict requirement for dependent packages to be also GA will not be able to use the SK connector. (We don't have an estimate but this could be very small compared to the number of customers that are already OK on using the preview Azure SDK OpenAI SDK available for the past 18 months) +- Potential unexpected breaking changes introduced by OpenAI and Azure.AI.OpenAI beta versions that eventually we might be passing on due to their dependency. + +## 2. Preview + GA versioning + +This option we will introduce pre-release versions of the connectors: + +1. General Available (GA) versions of the connector will target a GA version of the SDK. +2. 
Pre-release versions of the connector will target a pre-release versions of the SDK. + +This option has some impact for customers that were targeting strictly only GA packages on their pipeline while using preview features that are not available anymore on underlying SDK GA versions. + +All preview only functionalities not available in the SDK will be Annotate in Semantic kernel connectors with an Experimental `SKEXP0011` dedicated identifier attribute, to identify and clarify the potential impact when attempting to move to a `GA` package. +Those annotations will be removed as soon as they are officially supported on the GA version of the SDK. + +```mermaid +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'showBranches': true, 'showCommitLabel':true,'mainBranchName': 'SemanticKernel'}} }%% + gitGraph TB: + checkout SemanticKernel + commit id:"SK 1.21" + branch OpenAI + commit id:"OAI 2.0-beta.12" + branch AzureOpenAI + commit id:"AOAI 2.0-beta.6" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.22-beta" + merge AzureOpenAI id:"SK AOAI 1.22-beta" + checkout OpenAI + commit id:"OAI 2.0 GA" + checkout AzureOpenAI + merge OpenAI id:"AOAI 2.0 GA" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.23" + merge AzureOpenAI id:"SK AOAI 1.23" + checkout OpenAI + commit id:"OAI 2.1-beta.1" + checkout AzureOpenAI + merge OpenAI id:"AOAI 2.1-beta.1" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.23-beta" + merge AzureOpenAI id:"SK AOAI 1.23-beta" + checkout OpenAI + commit id:"OAI 2.1-beta.2" + checkout AzureOpenAI + merge OpenAI id:"AOAI 2.1-beta.2" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.24-beta" + checkout SemanticKernel + merge AzureOpenAI id:"SK AOAI 1.24-beta" +``` + +Pros: + +- We send a clear message moving forward regarding what Azure and OpenAI consider stable and what is not, exposing only stable features from those SDKs in what we previously were considering as GA available features. 
+- New customers that have a strict requirement for dependent packages to be also GA will be able to use the SK connector.
+- We will be able to have preview versions of Connectors for new features that are not yet GA without impacting the GA versions of the Connectors.
+
+Cons:
+
+- This changes our strategy for versioning, needing some clear clarification and communication for the first releases to mitigate impact or smooth the transition.
+- Customers that were using `OpenAI` and `AzureOpenAI` preview-only features available in previous SK GA packages will need to update their pipelines to target only future SK pre-release versions.
+- Small overhead to maintain two versions of the connectors.
+
+### Version and Branching Strategy
+
+Create a special release branch for the targeted `GA` version of the connector, keeping it on record for that release with all modifications/removals that all the other projects need to make to work with the stable release. This will also be an important guideline on where and when to add/remove the `SKEXP0011` exceptions from APIs and samples.
+
+We will follow our own version cadence with the addition of a `beta` suffix for `beta` versions of the underlying SDKs.
+
+| Seq | OpenAI Version | Azure OpenAI Version | Semantic Kernel Version1 | Branch | 
+| --- | -------------- | -------------------- | ----------------------------------- | --------------- | 
+| 1 | 2.0.0 | 2.0.0 | 1.25.0 | releases/1.25.0 | 
+| 2 | 2.1.0-beta.1 | 2.1.0-beta.1 | 1.26.0-beta | main | 
+| 3 | 2.1.0-beta.3 | 2.1.0-beta.2 | 1.27.0-beta | main | 
+| 4 | No changes | No changes | 1.27.1-beta**2** | main | 
+| 5 | 2.1.0 | 2.1.0 | 1.28.0 | releases/1.28.0 | 
+| 6 | 2.2.0-beta.1 | 2.1.0-beta.1 | 1.29.0-beta | main | 
+
+1. Versions apply for the **Connectors packages** and the **Semantic Kernel meta package**.
+2. No changes on the SDKs but other minor changes to Semantic Kernel code base that needed a version update. 
+
+### Optional Smoothing Transition
+
+With the intent to smooth the transition and mitigate the impact on customers using preview features on SK GA packages, we would provide a notice period that gives customers time to adapt to the `preview` vs `GA` future releases of the connector packages. For the duration of the notice period we would maintain our strategy with the **Keep As-Is** option before shifting to the **Preview + GA versioning** option.
+
+## 3. Stop targeting preview packages
+
+> [!WARNING]
+> This option is not recommended but needs to be considered.
+
+This option will stop targeting preview packages, being strict with our 1.0 GA strategy, not exposing our customers to non-GA SDK features.
+
+As big features like Azure Assistants are still in preview, this option will have a big impact on our customers if they were targeting Agent frameworks and other important features that are not yet Generally Available, as described [here](https://github.com/Azure/azure-sdk-for-net/releases/tag/Azure.AI.OpenAI_2.0.0):
+
+> Assistants, Audio Generation, Batch, Files, Fine-Tuning, and Vector Stores are not yet included in the GA surface; they will continue to be available in preview library releases and the originating Azure OpenAI Service api-version labels. 
+ +```mermaid +%%{init: { 'logLevel': 'debug', 'theme': 'base', 'gitGraph': {'showBranches': true, 'showCommitLabel':true,'mainBranchName': 'SemanticKernel'}} }%% + gitGraph TB: + checkout SemanticKernel + commit id:"SK 1.21.1" + branch OpenAI + commit id:"OAI 2.0.0-beta.12" + branch AzureOpenAI + commit id:"AOAI 2.0.0-beta.6" + checkout OpenAI + commit id:"OAI 2.0.0 GA" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.22.0" + checkout AzureOpenAI + merge OpenAI id:"AOAI 2.0.0 GA" + checkout SemanticKernel + merge AzureOpenAI id:"SK AOAI 1.22.0" + checkout OpenAI + commit id:"OAI 2.1.0-beta.1" + checkout AzureOpenAI + commit id:"AOAI 2.1.0-beta.1" + checkout OpenAI + commit id:"OAI 2.1.0 GA" + checkout SemanticKernel + merge OpenAI id:"SK OAI 1.23.0" + checkout AzureOpenAI + commit id:"AOAI 2.1.0 GA" + checkout SemanticKernel + merge AzureOpenAI id:"SK AOAI 1.23.0" + +``` + +Pros: + +- As we have been only deploying GA versions of the connector, strictly we would be following a responsible GA only approach with GA SK packages not exposing customers to preview features as GA features at all. + +Cons: + +- Big impact on customers that are targeting preview features with no option to resort to a preview version of the connector. +- This strategy will render the use of the Semantic Kernel with Assistants and any other preview feature in Azure impractical. + +## Decision Outcome + +Chosen option: **Keep as is** + +As the current AI landscape for SDK is a fast changing environment, we need to be able be update and at the same time avoid as much as possible mix our current versioning strategy also minimizing the impact on customers. We decided on **Keep As-Is** option for now, and we may reconsider **Preview + GA versioning** option in the future when that decision doesn't bring big impact of lack of important functionality already used by our customer base. 
diff --git a/docs/decisions/0055-python-structured-output.md b/docs/decisions/0055-python-structured-output.md new file mode 100644 index 000000000000..942ffb5ac4e6 --- /dev/null +++ b/docs/decisions/0055-python-structured-output.md @@ -0,0 +1,220 @@ +--- +# These are optional elements. Feel free to remove any of them. +status: { in-progress } +contact: { Evan Mattson } +date: { 2024-09-10 } +deciders: { Ben Thomas } +consulted: { Dmytro Struk } +informed: + { Eduard van Valkenburg, Ben Thomas, Tao Chen, Dmytro Struk, Mark Wallace } +--- + +# Supporting OpenAI's Structured Output in Semantic Kernel Python + +## Context + +Last year, OpenAI introduced JSON mode, an essential feature for developers aiming to build reliable AI-driven applications. While JSON mode helps improve model reliability in generating valid JSON outputs, it falls short of enforcing strict adherence to specific schemas. This limitation has led developers to employ workaroundsโ€”such as custom open-source tools, iterative prompting, and retriesโ€”to ensure that the output conforms to required formats. + +To address this issue, OpenAI has introduced **Structured Outputs**โ€”a feature designed to ensure that model-generated outputs conform precisely to developer-specified JSON Schemas. This advancement allows developers to build more robust applications by providing guarantees that AI outputs will match predefined structures, improving interoperability with downstream systems. + +In recent evaluations, the new GPT-4o-2024-08-06 model with Structured Outputs demonstrated a perfect 100% score in adhering to complex JSON schemas, compared to GPT-4-0613, which scored less than 40%. Structured Outputs streamline the process of generating reliable structured data from unstructured inputs, a core need in various AI-powered applications such as data extraction, automated workflows, and function calling. 
+ +--- + +## Problem Statement + +Developers building AI-driven solutions using the OpenAI API often face challenges when extracting structured data from unstructured inputs. Ensuring model outputs conform to predefined JSON schemas is critical for creating reliable and interoperable systems. However, current models, even with JSON mode, do not guarantee schema conformity, leading to inefficiencies, errors, and additional development overhead in the form of retries and custom tools. + +With the introduction of Structured Outputs, OpenAI models are now able to strictly adhere to developer-provided JSON schemas. This feature eliminates the need for cumbersome workarounds and provides a more streamlined, efficient way to ensure consistency and reliability in model outputs. Integrating Structured Outputs into the Semantic Kernel orchestration SDK will enable developers to create more powerful, schema-compliant applications, reduce errors, and improve overall productivity. + +## Out of scope + +This ADR will focus on the `structured outputs` `response_format` and not on the function calling aspect. A subsequent ADR will be created around that in the future. 
+ +## Using Structured Outputs + +### Response Format + +OpenAI offers a new way to set the `response_format` on the prompt execution settings attribute: + +```python +from pydantic import BaseModel + +from openai import OpenAI + + +class Step(BaseModel): + explanation: str + output: str + + +class MathResponse(BaseModel): + steps: list[Step] + final_answer: str + + +client = AsyncOpenAI() + +completion = await client.beta.chat.completions.parse( + model="gpt-4o-2024-08-06", + messages=[ + {"role": "system", "content": "You are a helpful math tutor."}, + {"role": "user", "content": "solve 8x + 31 = 2"}, + ], + response_format=MathResponse, # for example, a Pydantic model type is directly configured +) + +message = completion.choices[0].message +if message.parsed: + print(message.parsed.steps) + print(message.parsed.final_answer) +else: + print(message.refusal) +``` + +For non-Pydantic models, SK will need to use the `KernelParameterMetadata`'s `schema_data` attribute. This represents the JSON Schema of the SK function: + +```json +{ + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "explanation": { + "type": "string" + }, + "output": { + "type": "string" + } + }, + "required": ["explanation", "output"], + "additionalProperties": false + } + }, + "final_answer": { + "type": "string" + } + }, + "required": ["steps", "final_answer"], + "additionalProperties": false +} +``` + +to create the required `json_schema` `response_format`: + +```json +"response_format": { + "type": "json_schema", + "json_schema": { + "name": "math_response", + "strict": true, + "schema": { // start of existing SK `schema_data` from above + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "explanation": { + "type": "string" + }, + "output": { + "type": "string" + } + }, + "required": ["explanation", "output"], + "additionalProperties": false + } + }, + 
"final_answer": { + "type": "string" + } + }, + "required": ["steps", "final_answer"], + "additionalProperties": false + } // end of existing SK `schema_data` from above + } +} +``` + +#### Handling the Streaming Response Format + +The new `structured output` response format is in beta, and the streaming chat completion code should be handled like this (which is different than our current streaming chat completion call): + +```python +async with client.beta.chat.completions.stream( + model='gpt-4o-mini', + messages=messages, + tools=[pydantic_function_tool(SomeClass)], +) as stream: + async for event in stream: + if event.type == 'content.delta': + print(event.delta, flush=True, end='') + elif event.type == 'content.done': + content = event.content + elif event.type == 'tool_calls.function.arguments.done': + tool_calls.append({'name': event.name, 'parsed_arguments': event.parsed_arguments}) + +print(content) +``` + +The `OpenAIHandler` class, which manages chat completions, will need to handle the new structured output streaming method, similar to: + +```python +async def _initiate_chat_stream(self, settings: OpenAIChatPromptExecutionSettings): + """Initiate the chat stream request and return the stream.""" + return self.client.beta.chat.completions.stream( + model='gpt-4o-mini', + messages=settings.messages, + tools=[pydantic_function_tool(SomeClass)], + ) + +async def _handle_chat_stream(self, stream): + """Handle the events from the chat stream.""" + async for event in stream: + if event.type == 'content.delta': + chunk_metadata = self._get_metadata_from_streaming_chat_response(event) + yield [ + self._create_streaming_chat_message_content(event, event.delta, chunk_metadata) + ] + elif event.type == 'tool_calls.function.arguments.done': + # Handle tool call results as needed + tool_calls.append({'name': event.name, 'parsed_arguments': event.parsed_arguments}) + +# An example calling method could be: +async def _send_chat_stream_request(self, settings: 
OpenAIChatPromptExecutionSettings): + """Send the chat stream request and handle the stream.""" + async with await self._initiate_chat_stream(settings) as stream: + async for chunk in self._handle_chat_stream(stream): + yield chunk +``` + +The method for handling the stream or non-streaming chat completion will be based on the `response_format` execution setting -- whether it uses a Pydantic model type or a JSON Schema. + +Since the `response_format` chat completion method differs from the current chat completion approach, we will need to maintain separate implementations for handling chat completions until OpenAI officially integrates the `response_format` method into the main library upon its graduation. + +### Callouts + +- The `structured output` `response_format` is limited to a single object type at this time. We will use a Pydantic validator to make sure a user is only specifying the proper type/amount of objects: + +```python +@field_validator("response_format", mode="before") + @classmethod + def validate_response_format(cls, value): + """Validate the response_format parameter.""" + if not isinstance(value, dict) and not (isinstance(value, type) and issubclass(value, BaseModel)): + raise ServiceInvalidExecutionSettingsError( + "response_format must be a dictionary or a single Pydantic model class" + ) + return value +``` + +- We need to provide good (and easy-to-find) documentation to let users and developers know which OpenAI/AzureOpenAI models/API-versions support `structured outputs`. + +### Chosen Solution + +- Response Format: Since there's a single approach here, we should integrate a clean implementation to define both streaming and non-streaming chat completions using our existing `OpenAIChatCompletionBase` and `OpenAIHandler` code. 
diff --git a/docs/decisions/diagrams/process/process_diagram_basic.png b/docs/decisions/diagrams/process/process_diagram_basic.png new file mode 100644 index 000000000000..2af77de70b3b Binary files /dev/null and b/docs/decisions/diagrams/process/process_diagram_basic.png differ diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props index 751afab85104..94d748c78057 100644 --- a/dotnet/Directory.Build.props +++ b/dotnet/Directory.Build.props @@ -11,6 +11,11 @@ disable + + + false + + disable @@ -30,4 +35,4 @@ <_Parameter1>false - \ No newline at end of file + diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index b1c7dc58eddc..c36351f3f88e 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -5,9 +5,11 @@ true + + + - - + @@ -18,7 +20,7 @@ - + @@ -27,8 +29,8 @@ - - + + @@ -36,8 +38,7 @@ - - + @@ -52,19 +53,21 @@ - + - + + + - + - + @@ -72,48 +75,48 @@ - - - - + + + + - - + + - + - + - + - + - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive @@ -134,8 +137,8 @@ runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + \ No newline at end of file diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index b6cd87d2040b..ea92dd3b4cad 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -96,6 +96,10 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{5C246969-D src\InternalUtilities\test\TestInternalUtilities.props = src\InternalUtilities\test\TestInternalUtilities.props EndProjectSection EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "connectors", "connectors", "{314A2705-0F70-44B6-8988-C6DF77BDFD42}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AI", "AI", 
"{C7299F56-3A55-471E-B10E-B1FBE101C625}" +EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{958AD708-F048-4FAF-94ED-D2F2B92748B9}" ProjectSection(SolutionItems) = preProject src\InternalUtilities\src\InternalUtilities.props = src\InternalUtilities\src\InternalUtilities.props @@ -199,10 +203,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AI", "AI", "{1B4CBDE0-10C2- EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Memory", "Memory", "{24503383-A8C4-4255-9998-28D70FE8E99A}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Agents", "src\Experimental\Agents\Experimental.Agents.csproj", "{5438D1E3-E03D-444B-BBBA-478F93161AA8}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Agents.UnitTests", "src\Experimental\Agents.UnitTests\Experimental.Agents.UnitTests.csproj", "{4AD80279-9AC1-476F-8103-E6CD5E4FD525}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Orchestration.Flow", "src\Experimental\Orchestration.Flow\Experimental.Orchestration.Flow.csproj", "{B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Experimental.Orchestration.Flow.IntegrationTests", "src\Experimental\Orchestration.Flow.IntegrationTests\Experimental.Orchestration.Flow.IntegrationTests.csproj", "{3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3}" @@ -214,7 +214,6 @@ EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-AE5F-4080-8790-13EB16323CEF}" ProjectSection(SolutionItems) = preProject src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs - src\InternalUtilities\src\Text\ReadOnlyMemoryConverter.cs = src\InternalUtilities\src\Text\ReadOnlyMemoryConverter.cs src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs src\InternalUtilities\src\Text\SseJsonParser.cs = 
src\InternalUtilities\src\Text\SseJsonParser.cs src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs @@ -318,9 +317,49 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Ollama", "src\Connectors\Connectors.Ollama\Connectors.Ollama.csproj", "{E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureCosmosDBMongoDB.UnitTests", "src\Connectors\Connectors.AzureCosmosDBMongoDB.UnitTests\Connectors.AzureCosmosDBMongoDB.UnitTests.csproj", "{2918478E-BC86-4D53-9D01-9C318F80C14F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Ollama.UnitTests", "src\Connectors\Connectors.Ollama.UnitTests\Connectors.Ollama.UnitTests.csproj", "{924DB138-1223-4C99-B6E6-0938A3FA14EF}" +EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureCosmosDBNoSQL.UnitTests", "src\Connectors\Connectors.AzureCosmosDBNoSQL.UnitTests\Connectors.AzureCosmosDBNoSQL.UnitTests.csproj", "{385A8FE5-87E2-4458-AE09-35E10BD2E67F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = 
"Connectors.OpenAI.UnitTests", "src\Connectors\Connectors.OpenAI.UnitTests\Connectors.OpenAI.UnitTests.csproj", "{36DDC119-C030-407E-AC51-A877E9E0F660}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureOpenAI", "src\Connectors\Connectors.AzureOpenAI\Connectors.AzureOpenAI.csproj", "{7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureOpenAI.UnitTests", "src\Connectors\Connectors.AzureOpenAI.UnitTests\Connectors.AzureOpenAI.UnitTests.csproj", "{8CF06B22-50F3-4F71-A002-622DB49DF0F5}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "FunctionCalling", "FunctionCalling", "{F58468D3-D635-4774-98B1-E1B5DE90A7FF}" + ProjectSection(SolutionItems) = preProject + src\InternalUtilities\connectors\AI\FunctionCalling\FunctionCallingUtilities.props = src\InternalUtilities\connectors\AI\FunctionCalling\FunctionCallingUtilities.props + src\InternalUtilities\connectors\AI\FunctionCalling\FunctionCallsProcessor.cs = src\InternalUtilities\connectors\AI\FunctionCalling\FunctionCallsProcessor.cs + EndProjectSection +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Weaviate.UnitTests", "src\Connectors\Connectors.Weaviate.UnitTests\Connectors.Weaviate.UnitTests.csproj", "{E8FC97B0-B417-4A90-993C-B8AA9223B058}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureAIInference", "src\Connectors\Connectors.AzureAIInference\Connectors.AzureAIInference.csproj", "{063044B2-A901-43C5-BFDF-5E4E71C7BC33}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.AzureAIInference.UnitTests", "src\Connectors\Connectors.AzureAIInference.UnitTests\Connectors.AzureAIInference.UnitTests.csproj", "{E0D45DDB-6D32-40FC-AC79-E1F342C4F513}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "OnnxSimpleRAG", "samples\Demos\OnnxSimpleRAG\OnnxSimpleRAG.csproj", "{8972254B-B8F0-4119-953B-378E3BACA59A}" 
+EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "process", "process", "{0D8C6358-5DAA-4EA6-A924-C268A9A21BC9}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.Abstractions", "src\Experimental\Process.Abstractions\Process.Abstractions.csproj", "{EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.Core", "src\Experimental\Process.Core\Process.Core.csproj", "{685853FD-F1FD-4B8E-A050-3404B8215C8E}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.LocalRuntime", "src\Experimental\Process.LocalRuntime\Process.LocalRuntime.csproj", "{27AF60D6-86F5-4591-A700-4F8C93F41B11}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Process.UnitTests", "src\Experimental\Process.UnitTests\Process.UnitTests.csproj", "{21A32285-8443-4A75-B2E8-27E6090EC562}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "GettingStartedWithProcesses", "samples\GettingStartedWithProcesses\GettingStartedWithProcesses.csproj", "{C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -557,18 +596,6 @@ Global {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Publish|Any CPU.Build.0 = Debug|Any CPU {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Release|Any CPU.ActiveCfg = Release|Any CPU {3FC4A81B-8ABE-473F-BC7C-6F4885775534}.Release|Any CPU.Build.0 = Release|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Publish|Any CPU.ActiveCfg = Publish|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Publish|Any CPU.Build.0 = Publish|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5438D1E3-E03D-444B-BBBA-478F93161AA8}.Release|Any CPU.Build.0 = Release|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Publish|Any CPU.Build.0 = Debug|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4AD80279-9AC1-476F-8103-E6CD5E4FD525}.Release|Any CPU.Build.0 = Release|Any CPU {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Debug|Any CPU.Build.0 = Debug|Any CPU {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B}.Publish|Any CPU.ActiveCfg = Publish|Any CPU @@ -791,18 +818,114 @@ Global {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Publish|Any CPU.Build.0 = Debug|Any CPU {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.ActiveCfg = Release|Any CPU {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}.Release|Any CPU.Build.0 = Release|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.ActiveCfg = Debug|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.Build.0 = Debug|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.Build.0 = Release|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Publish|Any CPU.Build.0 = Publish|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD}.Release|Any CPU.Build.0 = Release|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Publish|Any CPU.Build.0 = Debug|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2918478E-BC86-4D53-9D01-9C318F80C14F}.Release|Any CPU.Build.0 = Release|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.Build.0 = Debug|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.ActiveCfg = Debug|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.Build.0 = Debug|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.ActiveCfg = Release|Any CPU {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.Build.0 = Release|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Publish|Any CPU.Build.0 = Debug|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {924DB138-1223-4C99-B6E6-0938A3FA14EF}.Release|Any CPU.Build.0 = Release|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.Build.0 = Debug|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.Build.0 = Release|Any CPU + {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Publish|Any CPU.Build.0 = Debug|Any CPU + {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.Build.0 = Release|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.Build.0 = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.Build.0 = Debug|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.ActiveCfg = Release|Any CPU + {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.Build.0 = Release|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.Build.0 = Publish|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.Build.0 = Release|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.Build.0 = Debug|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.Build.0 = Release|Any CPU + {E8FC97B0-B417-4A90-993C-B8AA9223B058}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{E8FC97B0-B417-4A90-993C-B8AA9223B058}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E8FC97B0-B417-4A90-993C-B8AA9223B058}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {E8FC97B0-B417-4A90-993C-B8AA9223B058}.Publish|Any CPU.Build.0 = Debug|Any CPU + {E8FC97B0-B417-4A90-993C-B8AA9223B058}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E8FC97B0-B417-4A90-993C-B8AA9223B058}.Release|Any CPU.Build.0 = Release|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Debug|Any CPU.Build.0 = Debug|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Publish|Any CPU.ActiveCfg = Publish|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Publish|Any CPU.Build.0 = Publish|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Release|Any CPU.ActiveCfg = Release|Any CPU + {063044B2-A901-43C5-BFDF-5E4E71C7BC33}.Release|Any CPU.Build.0 = Release|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Publish|Any CPU.Build.0 = Debug|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513}.Release|Any CPU.Build.0 = Release|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Publish|Any CPU.Build.0 = Debug|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8972254B-B8F0-4119-953B-378E3BACA59A}.Release|Any CPU.Build.0 = Release|Any CPU + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Publish|Any CPU.Build.0 = Debug|Any CPU + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F}.Release|Any CPU.Build.0 = Release|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Publish|Any CPU.Build.0 = Debug|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {685853FD-F1FD-4B8E-A050-3404B8215C8E}.Release|Any CPU.Build.0 = Release|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Debug|Any CPU.Build.0 = Debug|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Publish|Any CPU.Build.0 = Debug|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Release|Any CPU.ActiveCfg = Release|Any CPU + {27AF60D6-86F5-4591-A700-4F8C93F41B11}.Release|Any CPU.Build.0 = Release|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Debug|Any CPU.Build.0 = Debug|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Publish|Any CPU.Build.0 = Debug|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Release|Any CPU.ActiveCfg = Release|Any CPU + {21A32285-8443-4A75-B2E8-27E6090EC562}.Release|Any CPU.Build.0 = Release|Any CPU + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Publish|Any CPU.ActiveCfg = Debug|Any CPU + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Publish|Any CPU.Build.0 = Debug|Any CPU + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -839,6 +962,8 @@ Global {6AAB0620-33A1-4A98-A63B-6560B9BA47A4} = {24503383-A8C4-4255-9998-28D70FE8E99A} {50FAE231-6F24-4779-9D02-12ABBC9A49E2} = {24503383-A8C4-4255-9998-28D70FE8E99A} {5C246969-D794-4EC3-8E8F-F90D4D166420} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} + {314A2705-0F70-44B6-8988-C6DF77BDFD42} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} + {C7299F56-3A55-471E-B10E-B1FBE101C625} = {314A2705-0F70-44B6-8988-C6DF77BDFD42} {958AD708-F048-4FAF-94ED-D2F2B92748B9} = {4D3DAE63-41C6-4E1C-A35A-E77BDFC40675} {29E7D971-1308-4171-9872-E8E4669A1134} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} {B00AD427-0047-4850-BEF9-BA8237EA9D8B} = {958AD708-F048-4FAF-94ED-D2F2B92748B9} @@ -866,8 +991,6 @@ Global {3FC4A81B-8ABE-473F-BC7C-6F4885775534} = {A21FAC7C-0C09-4EAD-843B-926ACEF73C80} {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {24503383-A8C4-4255-9998-28D70FE8E99A} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {5438D1E3-E03D-444B-BBBA-478F93161AA8} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} - {4AD80279-9AC1-476F-8103-E6CD5E4FD525} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} {B0CE8C69-EC56-4825-94AB-01CA7E8BA55B} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} {3A4B8F90-3E74-43E0-800C-84F8AA9B5BF3} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} {731CC542-8BE9-42D4-967D-99206EC2B310} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} @@ -911,8 +1034,26 @@ Global {B0B3901E-AF56-432B-8FAA-858468E5D0DF} = {24503383-A8C4-4255-9998-28D70FE8E99A} {1D4667B9-9381-4E32-895F-123B94253EE8} = 
{0247C2C9-86C3-45BA-8873-28B0948EDC0C} {E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} - {38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {E7E60E1D-1A44-4DE9-A44D-D5052E809DDD} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {2918478E-BC86-4D53-9D01-9C318F80C14F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} {E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {924DB138-1223-4C99-B6E6-0938A3FA14EF} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {385A8FE5-87E2-4458-AE09-35E10BD2E67F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {36DDC119-C030-407E-AC51-A877E9E0F660} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {8CF06B22-50F3-4F71-A002-622DB49DF0F5} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {F58468D3-D635-4774-98B1-E1B5DE90A7FF} = {C7299F56-3A55-471E-B10E-B1FBE101C625} + {E8FC97B0-B417-4A90-993C-B8AA9223B058} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C} + {063044B2-A901-43C5-BFDF-5E4E71C7BC33} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {E0D45DDB-6D32-40FC-AC79-E1F342C4F513} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1} + {8972254B-B8F0-4119-953B-378E3BACA59A} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263} + {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} = {A2357CF8-3BB9-45A1-93F1-B366C9B63658} + {EE3CCD38-0F10-45F3-8B4E-2A1B31DCEF5F} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} + {685853FD-F1FD-4B8E-A050-3404B8215C8E} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} + {27AF60D6-86F5-4591-A700-4F8C93F41B11} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} + {21A32285-8443-4A75-B2E8-27E6090EC562} = {0D8C6358-5DAA-4EA6-A924-C268A9A21BC9} + {C057ACDF-DDD8-496B-BAF9-1C6E4E1248D7} = {FA3720F1-C99A-49B2-9577-A940257098BF} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83} diff --git 
a/dotnet/SK-dotnet.sln.DotSettings b/dotnet/SK-dotnet.sln.DotSettings index 091a6854bc6b..d8964e230315 100644 --- a/dotnet/SK-dotnet.sln.DotSettings +++ b/dotnet/SK-dotnet.sln.DotSettings @@ -221,7 +221,6 @@ public void It$SOMENAME$() True True True - True True True True diff --git a/dotnet/docs/EXPERIMENTS.md b/dotnet/docs/EXPERIMENTS.md index 8cc9287ff55e..f4268d498c98 100644 --- a/dotnet/docs/EXPERIMENTS.md +++ b/dotnet/docs/EXPERIMENTS.md @@ -21,6 +21,7 @@ You can use the following diagnostic IDs to ignore warnings or errors for a part | SKEXP0050 | Out-of-the-box plugins | | SKEXP0060 | Planners | | SKEXP0070 | AI connectors | +| SKEXP0080 | Processes | | SKEXP0100 | Advanced Semantic Kernel features | | SKEXP0110 | Semantic Kernel Agents | @@ -76,6 +77,8 @@ You can use the following diagnostic IDs to ignore warnings or errors for a part | SKEXP0070 | ONNX AI connector | | SKEXP0070 | Hugging Face AI connector | | | | | | | | | +| SKEXP0080 | Process Framework | +| | | | | | | | | SKEXP0101 | Experiment with Assistants | | SKEXP0101 | Experiment with Flow Orchestration | | | | | | | | | diff --git a/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md new file mode 100644 index 000000000000..00cf243fc193 --- /dev/null +++ b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md @@ -0,0 +1,216 @@ +# OpenAI Connector Migration Guide + +This manual prepares you for the migration of your OpenAI Connector to the new OpenAI Connector. The new OpenAI Connector is a complete rewrite of the existing OpenAI Connector and is designed to be more efficient, reliable, and scalable. This manual will guide you through the migration process and help you understand the changes that have been made to the OpenAI Connector. + +## 1. 
Package Setup when Using Azure + +If you are working with Azure and/or OpenAI public APIs, you will need to change the package from `Microsoft.SemanticKernel.Connectors.OpenAI` to `Microsoft.SemanticKernel.Connectors.AzureOpenAI`. + +> [!IMPORTANT] +> The `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package so there's no need to add both to your project when using `OpenAI` related types. + +```diff +- // Before +- using Microsoft.SemanticKernel.Connectors.OpenAI; ++ // After ++ using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +``` + +### 1.1 AzureOpenAIClient + +When using Azure with OpenAI, where you were previously using `OpenAIClient` you will need to update your code to use the new `AzureOpenAIClient` type. + +### 1.2 Services + +All services below now belong to the `Microsoft.SemanticKernel.Connectors.AzureOpenAI` namespace. + +- `AzureOpenAIAudioToTextService` +- `AzureOpenAIChatCompletionService` +- `AzureOpenAITextEmbeddingGenerationService` +- `AzureOpenAITextToAudioService` +- `AzureOpenAITextToImageService` + +## 2. Text Generation Deprecated + +The latest `OpenAI` SDK does not support the text generation modality; when migrating to their underlying SDK we had to drop the support and removed the `TextGeneration`-specific services, but the existing `ChatCompletion` ones still support it (they implement `ITextGenerationService`). + +If you were using any of the `OpenAITextGenerationService` or `AzureOpenAITextGenerationService` you will need to update your code to target a chat completion model instead, using `OpenAIChatCompletionService` or `AzureOpenAIChatCompletionService`. + +> [!NOTE] +> OpenAI and AzureOpenAI `ChatCompletion` services also implement the `ITextGenerationService` interface and that may not require any changes to your code if you were targeting the `ITextGenerationService` interface.
+ +tags: +`OpenAITextGenerationService`,`AzureOpenAITextGenerationService`, +`AddOpenAITextGeneration`,`AddAzureOpenAITextGeneration` + +## 3. ChatCompletion Multiple Choices Deprecated + +The latest `OpenAI` SDK does not support multiple choices; when migrating to their underlying SDK we had to drop the support and also removed `ResultsPerPrompt` from the `OpenAIPromptExecutionSettings`. + +tags: `ResultsPerPrompt`,`results_per_prompt` + +## 4. OpenAI File Service Deprecation + +The `OpenAIFileService` was deprecated in the latest version of the OpenAI Connector. We strongly recommend updating your code to use the new `OpenAIClient.GetFileClient()` for file management operations. + +## 5. OpenAI ChatCompletion custom endpoint + +The `OpenAIChatCompletionService` **experimental** constructor for custom endpoints will not attempt to auto-correct the endpoint and will use it as is. + +There are only two specific cases where we previously attempted to auto-correct the endpoint. + +1. If you provided the `chat/completions` path before. Now it needs to be removed as it is added automatically to the end of your original endpoint by the `OpenAI SDK`. + + ```diff + - http://any-host-and-port/v1/chat/completions + + http://any-host-and-port/v1 + ``` + +2. If you provided a custom endpoint without any path. We won't be adding `v1/` as the first path segment anymore. Now the `v1` path needs to be provided as part of your endpoint. + + ```diff + - http://any-host-and-port/ + + http://any-host-and-port/v1 + ``` + +## 6. SemanticKernel MetaPackage + +To be retro-compatible with the new OpenAI and AzureOpenAI Connectors, our `Microsoft.SemanticKernel` meta package changed its dependency to use the new `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package that depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package. This way, if you are using the metapackage, no change is needed to get access to `Azure` related types. + +## 7. 
Contents + +### 7.1 OpenAIChatMessageContent + +- The `Tools` property type has changed from `IReadOnlyList<ChatCompletionsToolCall>` to `IReadOnlyList<ChatToolCall>`. + +- Inner content type has changed from `ChatCompletionsFunctionToolCall` to `ChatToolCall`. + +- Metadata type `FunctionToolCalls` has changed from `IEnumerable<ChatCompletionsFunctionToolCall>` to `IEnumerable<ChatToolCall>`. + +### 7.2 OpenAIStreamingChatMessageContent + +- The `FinishReason` property type has changed from `CompletionsFinishReason` to `FinishReason`. +- The `ToolCallUpdate` property has been renamed to `ToolCallUpdates` and its type has changed from `StreamingToolCallUpdate?` to `IReadOnlyList<StreamingChatToolCallUpdate>?`. +- The `AuthorName` property is not initialized because it's not provided by the underlying library anymore. + +### 7.3 Metrics for AzureOpenAI Connector + +The meter `s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI");` and the relevant counters still have old names that contain "openai" in them, such as: + +- `semantic_kernel.connectors.openai.tokens.prompt` +- `semantic_kernel.connectors.openai.tokens.completion` +- `semantic_kernel.connectors.openai.tokens.total` + +## 8. Using Azure with your data (Data Sources) + +With the new `AzureOpenAIClient`, you can now specify your data source through the options, and that requires a small change in your code to the new type.
+ +Before + +```csharp +var promptExecutionSettings = new OpenAIPromptExecutionSettings +{ + AzureChatExtensionsOptions = new AzureChatExtensionsOptions + { + Extensions = [ new AzureSearchChatExtensionConfiguration + { + SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey), + IndexName = TestConfiguration.AzureAISearch.IndexName + }] + } +}; +``` + +After + +```csharp +var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings +{ + AzureChatDataSource = new AzureSearchChatDataSource + { + Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey), + IndexName = TestConfiguration.AzureAISearch.IndexName + } +}; +``` + +## 9. Breaking glass scenarios + +Breaking glass scenarios are scenarios where you may need to update your code to use the new OpenAI Connector. Below are some of the breaking changes that you may need to be aware of. + +#### 9.1 KernelContent Metadata + +Some of the keys in the content metadata dictionary have changed; you will need to update your code if you were using the previous key names. + +- `Created` -> `CreatedAt` + +#### 9.2 Prompt Filter Results + +The `PromptFilterResults` metadata type has changed from `IReadOnlyList<ContentFilterResultsForPrompt>` to `ContentFilterResultForPrompt`. + +#### 9.3 Content Filter Results + +The `ContentFilterResultsForPrompt` type has changed from `ContentFilterResultsForChoice` to `ContentFilterResultForResponse`. + +#### 9.4 Finish Reason + +The FinishReason metadata string value has changed from `stop` to `Stop` + +#### 9.5 Tool Calls + +The ToolCalls metadata string value has changed from `tool_calls` to `ToolCalls` + +#### 9.6 LogProbs / Log Probability Info + +The `LogProbabilityInfo` type has changed from `ChatChoiceLogProbabilityInfo` to `IReadOnlyList<ChatTokenLogProbabilityInfo>`.
+ +#### 9.7 Finish Details, Index, and Enhancements + +All of the above have been removed. + +#### 9.8 Token Usage + +The Token usage naming convention from `OpenAI` changed from `Completion`, `Prompt` tokens to `Output` and `Input` respectively. You will need to update your code to use the new naming. + +The type also changed from `CompletionsUsage` to `ChatTokenUsage`. + +[Example of Token Usage Metadata Changes](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-a323107b9f8dc8559a83e50080c6e34551ddf6d9d770197a473f249589e8fb47) + +```diff +- Before +- var usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage; +- var completionTokens = usage?.CompletionTokens ?? 0; +- var promptTokens = usage?.PromptTokens ?? 0; + ++ After ++ var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage; ++ var promptTokens = usage?.InputTokens ?? 0; ++ var completionTokens = usage?.OutputTokens ?? 0; + +var totalTokens = usage?.TotalTokens ?? 0; +``` + +#### 9.9 OpenAIClient + +The `OpenAIClient` type previously was an Azure-specific namespace type but now it is an `OpenAI` SDK namespace type; you will need to update your code to use the new `OpenAIClient` type. + +When using Azure, you will need to update your code to use the new `AzureOpenAIClient` type. + +#### 9.10 Pipeline Configuration + +The new `OpenAI` SDK uses a different pipeline configuration, and has a dependency on the `System.ClientModel` package. You will need to update your code to use the new `HttpClientPipelineTransport` transport configuration where before you were using `HttpClientTransport` from `Azure.Core.Pipeline`.
+ +[Example of Pipeline Configuration](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-fab02d9a75bf43cb57f71dddc920c3f72882acf83fb125d8cad963a643d26eb3) + +```diff +var clientOptions = new OpenAIClientOptions +{ +- // Before: From Azure.Core.Pipeline +- Transport = new HttpClientTransport(httpClient), + ++ // After: From OpenAI SDK -> System.ClientModel ++ Transport = new HttpClientPipelineTransport(httpClient), +}; +``` diff --git a/dotnet/notebooks/01-basic-loading-the-kernel.ipynb b/dotnet/notebooks/01-basic-loading-the-kernel.ipynb index f9d7e5b8abe4..938a38b1f685 100644 --- a/dotnet/notebooks/01-basic-loading-the-kernel.ipynb +++ b/dotnet/notebooks/01-basic-loading-the-kernel.ipynb @@ -32,7 +32,7 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"" + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"" ] }, { @@ -111,7 +111,7 @@ " serviceId: \"Azure_curie\" // alias used in the prompt templates' config.json\n", ")\n", ".AddOpenAIChatCompletion(\n", - " \"gpt-3.5-turbo\", // OpenAI Model Name\n", + " \"gpt-4o-mini\", // OpenAI Model Name\n", " \"...your OpenAI API Key...\", // OpenAI API key\n", " \"...your OpenAI Org ID...\", // *optional* OpenAI Organization ID\n", " serviceId: \"OpenAI_davinci\" // alias used in the prompt templates' config.json\n", diff --git a/dotnet/notebooks/02-running-prompts-from-file.ipynb b/dotnet/notebooks/02-running-prompts-from-file.ipynb index 2475712372c8..bf5a43317428 100644 --- a/dotnet/notebooks/02-running-prompts-from-file.ipynb +++ b/dotnet/notebooks/02-running-prompts-from-file.ipynb @@ -93,7 +93,7 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", "\n", "#!import config/Settings.cs\n", "\n", diff --git a/dotnet/notebooks/03-semantic-function-inline.ipynb b/dotnet/notebooks/03-semantic-function-inline.ipynb index 3ea79d955c37..0512692e3518 100644 --- 
a/dotnet/notebooks/03-semantic-function-inline.ipynb +++ b/dotnet/notebooks/03-semantic-function-inline.ipynb @@ -51,7 +51,7 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", "\n", "#!import config/Settings.cs\n", "\n", diff --git a/dotnet/notebooks/04-kernel-arguments-chat.ipynb b/dotnet/notebooks/04-kernel-arguments-chat.ipynb index 9af04e818fae..18a156e3d28b 100644 --- a/dotnet/notebooks/04-kernel-arguments-chat.ipynb +++ b/dotnet/notebooks/04-kernel-arguments-chat.ipynb @@ -30,7 +30,7 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", "#!import config/Settings.cs\n", "\n", "using Microsoft.SemanticKernel;\n", diff --git a/dotnet/notebooks/05-using-the-planner.ipynb b/dotnet/notebooks/05-using-the-planner.ipynb index e58f351ae721..6584fb329620 100644 --- a/dotnet/notebooks/05-using-the-planner.ipynb +++ b/dotnet/notebooks/05-using-the-planner.ipynb @@ -25,8 +25,8 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Planners.Handlebars, 1.11.1-preview\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Planners.Handlebars, 1.19.0-preview\"\n", "\n", "#!import config/Settings.cs\n", "#!import config/Utils.cs\n", diff --git a/dotnet/notebooks/06-memory-and-embeddings.ipynb b/dotnet/notebooks/06-memory-and-embeddings.ipynb index a1656d450edc..1c2d839d8439 100644 --- a/dotnet/notebooks/06-memory-and-embeddings.ipynb +++ b/dotnet/notebooks/06-memory-and-embeddings.ipynb @@ -33,8 +33,8 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.11.1-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.19.0-alpha\"\n", 
"#r \"nuget: System.Linq.Async, 6.0.1\"\n", "\n", "#!import config/Settings.cs\n", @@ -85,6 +85,8 @@ "outputs": [], "source": [ "using Microsoft.SemanticKernel.Memory;\n", + "using Microsoft.SemanticKernel.Embeddings;\n", + "using Microsoft.SemanticKernel.Connectors.AzureOpenAI;\n", "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", "\n", "// Memory functionality is experimental\n", @@ -92,24 +94,12 @@ "\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", - "var memoryBuilder = new MemoryBuilder();\n", - "\n", - "if (useAzureOpenAI)\n", - "{\n", - " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\n", - " \"text-embedding-ada-002\",\n", - " azureEndpoint, \n", - " apiKey,\n", - " \"model-id\");\n", - "}\n", - "else\n", - "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", - "}\n", + "var modelId = \"text-embedding-ada-002\";\n", + "ITextEmbeddingGenerationService textEmbeddingService = useAzureOpenAI\n", + " ? 
new AzureOpenAITextEmbeddingGenerationService(deploymentName: modelId, endpoint: azureEndpoint, apiKey: apiKey)\n", + " : new OpenAITextEmbeddingGenerationService(modelId: modelId, apiKey: apiKey);\n", "\n", - "memoryBuilder.WithMemoryStore(new VolatileMemoryStore());\n", - "\n", - "var memory = memoryBuilder.Build();" + "var memory = new SemanticTextMemory(new VolatileMemoryStore(), textEmbeddingService);" ] }, { @@ -465,22 +455,12 @@ "\n", "var memoryBuilder = new MemoryBuilder();\n", "\n", - "if (useAzureOpenAI)\n", - "{\n", - " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\n", - " \"text-embedding-ada-002\",\n", - " azureEndpoint, \n", - " apiKey,\n", - " \"model-id\");\n", - "}\n", - "else\n", - "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", - "}\n", - "\n", - "memoryBuilder.WithMemoryStore(new VolatileMemoryStore());\n", + "var modelId = \"text-embedding-ada-002\";\n", + "ITextEmbeddingGenerationService textEmbeddingService = useAzureOpenAI\n", + " ? 
new AzureOpenAITextEmbeddingGenerationService(deploymentName: modelId, endpoint: azureEndpoint, apiKey: apiKey)\n", + " : new OpenAITextEmbeddingGenerationService(modelId: modelId, apiKey: apiKey);\n", "\n", - "var memory = memoryBuilder.Build();" + "var memory = new SemanticTextMemory(new VolatileMemoryStore(), textEmbeddingService);" ] }, { diff --git a/dotnet/notebooks/07-DALL-E-3.ipynb b/dotnet/notebooks/07-DALL-E-3.ipynb index 4c0ef213e87b..88690c85adee 100644 --- a/dotnet/notebooks/07-DALL-E-3.ipynb +++ b/dotnet/notebooks/07-DALL-E-3.ipynb @@ -33,7 +33,7 @@ "source": [ "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", "\n", - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", "#r \"nuget: System.Numerics.Tensors, 8.0.0\"\n", "#r \"nuget: SkiaSharp, 2.88.3\"\n", "\n", diff --git a/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb b/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb index c573f57cf2fc..089fd1485eb7 100644 --- a/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb +++ b/dotnet/notebooks/08-chatGPT-with-DALL-E-3.ipynb @@ -56,7 +56,7 @@ "source": [ "// Usual setup: importing Semantic Kernel SDK and SkiaSharp, used to display images inline.\n", "\n", - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", "#r \"nuget: SkiaSharp, 2.88.3\"\n", "\n", "#!import config/Settings.cs\n", @@ -108,12 +108,12 @@ "\n", "if(useAzureOpenAI)\n", "{\n", - " builder.AddAzureOpenAIChatCompletion(\"gpt-35-turbo\", azureEndpoint, apiKey);\n", + " builder.AddAzureOpenAIChatCompletion(\"gpt-4o-mini\", azureEndpoint, apiKey);\n", " builder.AddAzureOpenAITextToImage(\"dall-e-3\", azureEndpoint, apiKey);\n", "}\n", "else\n", "{\n", - " builder.AddOpenAIChatCompletion(\"gpt-3.5-turbo\", apiKey, orgId);\n", + " builder.AddOpenAIChatCompletion(\"gpt-4o-mini\", apiKey, orgId);\n", " builder.AddOpenAITextToImage(apiKey, orgId);\n", 
"}\n", "\n", diff --git a/dotnet/notebooks/09-memory-with-chroma.ipynb b/dotnet/notebooks/09-memory-with-chroma.ipynb index 66a93ec523b6..2bba5f2e7667 100644 --- a/dotnet/notebooks/09-memory-with-chroma.ipynb +++ b/dotnet/notebooks/09-memory-with-chroma.ipynb @@ -38,9 +38,9 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Connectors.Chroma, 1.11.1-alpha\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.11.1-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Connectors.Chroma, 1.19.0-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Memory, 1.19.0-alpha\"\n", "#r \"nuget: System.Linq.Async, 6.0.1\"\n", "\n", "#!import config/Settings.cs\n", @@ -102,26 +102,18 @@ "source": [ "#pragma warning disable SKEXP0001, SKEXP0010, SKEXP0020, SKEXP0050\n", "\n", + "using Microsoft.SemanticKernel.Connectors.AzureOpenAI;\n", "using Microsoft.SemanticKernel.Connectors.OpenAI;\n", + "using Microsoft.SemanticKernel.Embeddings;\n", "\n", "var (useAzureOpenAI, model, azureEndpoint, apiKey, orgId) = Settings.LoadFromFile();\n", "\n", - "var memoryBuilder = new MemoryBuilder();\n", + "var modelId = \"text-embedding-ada-002\";\n", + "ITextEmbeddingGenerationService textEmbeddingService = useAzureOpenAI\n", + " ? 
new AzureOpenAITextEmbeddingGenerationService(deploymentName: modelId, endpoint: azureEndpoint, apiKey: apiKey)\n", + " : new OpenAITextEmbeddingGenerationService(modelId: modelId, apiKey: apiKey);\n", "\n", - "if (useAzureOpenAI)\n", - "{\n", - " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey, \"model-id\");\n", - "}\n", - "else\n", - "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", - "}\n", - "\n", - "var chromaMemoryStore = new ChromaMemoryStore(\"http://127.0.0.1:8000\");\n", - "\n", - "memoryBuilder.WithMemoryStore(chromaMemoryStore);\n", - "\n", - "var memory = memoryBuilder.Build();" + "var memory = new SemanticTextMemory(new VolatileMemoryStore(), textEmbeddingService);" ] }, { @@ -472,22 +464,12 @@ "source": [ "#pragma warning disable SKEXP0001, SKEXP0010, SKEXP0020, SKEXP0050\n", "\n", - "var memoryBuilder = new MemoryBuilder();\n", - "\n", - "if (useAzureOpenAI)\n", - "{\n", - " memoryBuilder.WithAzureOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", azureEndpoint, apiKey, \"model-id\");\n", - "}\n", - "else\n", - "{\n", - " memoryBuilder.WithOpenAITextEmbeddingGeneration(\"text-embedding-ada-002\", apiKey);\n", - "}\n", - "\n", - "var chromaMemoryStore = new ChromaMemoryStore(\"http://127.0.0.1:8000\");\n", - "\n", - "memoryBuilder.WithMemoryStore(chromaMemoryStore);\n", + "var modelId = \"text-embedding-ada-002\";\n", + "ITextEmbeddingGenerationService textEmbeddingService = useAzureOpenAI\n", + " ? 
new AzureOpenAITextEmbeddingGenerationService(deploymentName: modelId, endpoint: azureEndpoint, apiKey: apiKey)\n", + " : new OpenAITextEmbeddingGenerationService(modelId: modelId, apiKey: apiKey);\n", "\n", - "var memory = memoryBuilder.Build();" + "var memory = new SemanticTextMemory(new VolatileMemoryStore(), textEmbeddingService);" ] }, { diff --git a/dotnet/notebooks/10-BingSearch-using-kernel.ipynb b/dotnet/notebooks/10-BingSearch-using-kernel.ipynb index 2f5534b79cbb..5a66e8d1c11a 100644 --- a/dotnet/notebooks/10-BingSearch-using-kernel.ipynb +++ b/dotnet/notebooks/10-BingSearch-using-kernel.ipynb @@ -35,9 +35,9 @@ }, "outputs": [], "source": [ - "#r \"nuget: Microsoft.SemanticKernel, 1.11.1\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Plugins.Web, 1.11.1-alpha\"\n", - "#r \"nuget: Microsoft.SemanticKernel.Plugins.Core, 1.11.1-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel, 1.19.0\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Web, 1.19.0-alpha\"\n", + "#r \"nuget: Microsoft.SemanticKernel.Plugins.Core, 1.19.0-alpha\"\n", "\n", "#!import config/Settings.cs\n", "#!import config/Utils.cs\n", diff --git a/dotnet/notebooks/config/Settings.cs b/dotnet/notebooks/config/Settings.cs index 498d5afaae12..a3a5cfc8ba6b 100644 --- a/dotnet/notebooks/config/Settings.cs +++ b/dotnet/notebooks/config/Settings.cs @@ -59,7 +59,7 @@ public static async Task AskModel(bool _useAzureOpenAI = true, string co else { // Use the best model by default, and reduce the setup friction, particularly in VS Studio. 
- model = "gpt-3.5-turbo"; + model = "gpt-4o-mini"; } } diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props index 00837d71f910..ee7df54cbda2 100644 --- a/dotnet/nuget/nuget-package.props +++ b/dotnet/nuget/nuget-package.props @@ -1,8 +1,7 @@ - 1.17.1 - + 1.21.1 $(VersionPrefix)-$(VersionSuffix) $(VersionPrefix) @@ -10,7 +9,7 @@ true - 1.17.0 + 1.21.1 $(NoWarn);CP0003 diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs index 16c019aebbfd..e1612bfc83c1 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs @@ -12,7 +12,7 @@ namespace Agents; /// Demonstrate usage of for both direction invocation /// of and via . /// -public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseTest(output) +public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseAgentsTest(output) { [Fact] public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() @@ -23,7 +23,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -38,31 +38,25 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync() await InvokeAgentAsync("What is the special drink?"); await InvokeAgentAsync("Thank you"); - // Display the chat history. 
- Console.WriteLine("================================"); - Console.WriteLine("CHAT HISTORY"); - Console.WriteLine("================================"); - foreach (ChatMessageContent message in chat) - { - this.WriteContent(message); - } + // Display the entire chat history. + WriteChatHistory(chat); // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - ChatMessageContent userContent = new(AuthorRole.User, input); - chat.Add(userContent); - this.WriteContent(userContent); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) { // Do not add a message implicitly added to the history. - if (!content.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) + if (!response.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)) { - chat.Add(content); + chat.Add(response); } - this.WriteContent(content); + this.WriteAgentChatMessage(response); } } } @@ -76,7 +70,7 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync() { Instructions = "Answer questions about the menu.", Kernel = CreateKernelWithFilter(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -91,33 +85,148 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync() await InvokeAgentAsync("What is the special drink?"); await InvokeAgentAsync("Thank you"); - // Display the chat history. 
- Console.WriteLine("================================"); - Console.WriteLine("CHAT HISTORY"); - Console.WriteLine("================================"); - ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); - for (int index = history.Length; index > 0; --index) + // Display the entire chat history. + WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync()); + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) { - this.WriteContent(history[index - 1]); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) + { + this.WriteAgentChatMessage(response); + } } + } + + [Fact] + public async Task UseAutoFunctionInvocationFilterWithStreamingAgentInvocationAsync() + { + // Define the agent + ChatCompletionAgent agent = + new() + { + Instructions = "Answer questions about the menu.", + Kernel = CreateKernelWithFilter(), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + }; + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + /// Create the chat history to capture the agent interaction. + ChatHistory chat = []; + + // Respond to user input, invoking functions where appropriate. + await InvokeAgentAsync("Hello"); + await InvokeAgentAsync("What is the special soup?"); + await InvokeAgentAsync("What is the special drink?"); + await InvokeAgentAsync("Thank you"); + + // Display the entire chat history. + WriteChatHistory(chat); // Local function to invoke agent and display the conversation messages. 
async Task InvokeAgentAsync(string input) { - ChatMessageContent userContent = new(AuthorRole.User, input); - chat.AddChatMessage(userContent); - this.WriteContent(userContent); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + int historyCount = chat.Count; + + bool isFirst = false; + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat)) { - this.WriteContent(content); + if (string.IsNullOrEmpty(response.Content)) + { + continue; + } + + if (!isFirst) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + isFirst = true; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + if (historyCount <= chat.Count) + { + for (int index = historyCount; index < chat.Count; index++) + { + this.WriteAgentChatMessage(chat[index]); + } } } } - private void WriteContent(ChatMessageContent content) + [Fact] + public async Task UseAutoFunctionInvocationFilterWithStreamingAgentChatAsync() { - Console.WriteLine($"[{content.Items.LastOrDefault()?.GetType().Name ?? "(empty)"}] {content.Role} : '{content.Content}'"); + // Define the agent + ChatCompletionAgent agent = + new() + { + Instructions = "Answer questions about the menu.", + Kernel = CreateKernelWithFilter(), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + }; + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a chat for agent interaction. + AgentGroupChat chat = new(); + + // Respond to user input, invoking functions where appropriate. + await InvokeAgentAsync("Hello"); + await InvokeAgentAsync("What is the special soup?"); + await InvokeAgentAsync("What is the special drink?"); + await InvokeAgentAsync("Thank you"); + + // Display the entire chat history. 
+ WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync()); + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); + + bool isFirst = false; + await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync(agent)) + { + if (string.IsNullOrEmpty(response.Content)) + { + continue; + } + + if (!isFirst) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + isFirst = true; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + } + } + + private void WriteChatHistory(IEnumerable chat) + { + Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); + foreach (ChatMessageContent message in chat) + { + this.WriteAgentChatMessage(message); + } } private Kernel CreateKernelWithFilter() diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs index 82b2ca28bce0..8921dd2a6f9e 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs @@ -8,7 +8,7 @@ namespace Agents; /// /// Demonstrate service selection for through setting service-id -/// on and also providing override +/// on and also providing override /// when calling /// public class ChatCompletion_ServiceSelection(ITestOutputHelper output) : BaseTest(output) diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs index d3e94386af96..7a23efe6e112 100644 --- a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs @@ -1,6 +1,5 @@ ๏ปฟ// 
Copyright (c) Microsoft. All rights reserved. using System.ComponentModel; -using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; @@ -9,10 +8,9 @@ namespace Agents; /// -/// Demonstrate creation of and -/// eliciting its response to three explicit user messages. +/// Demonstrate consuming "streaming" message for . /// -public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseTest(output) +public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ParrotName = "Parrot"; private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; @@ -35,6 +33,9 @@ public async Task UseStreamingChatCompletionAgentAsync() await InvokeAgentAsync(agent, chat, "Fortune favors the bold."); await InvokeAgentAsync(agent, chat, "I came, I saw, I conquered."); await InvokeAgentAsync(agent, chat, "Practice makes perfect."); + + // Output the entire chat history + DisplayChatHistory(chat); } [Fact] @@ -49,7 +50,7 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync() Name = "Host", Instructions = MenuInstructions, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). 
@@ -61,37 +62,56 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync() // Respond to user input await InvokeAgentAsync(agent, chat, "What is the special soup?"); await InvokeAgentAsync(agent, chat, "What is the special drink?"); + + // Output the entire chat history + DisplayChatHistory(chat); } // Local function to invoke agent and display the conversation messages. private async Task InvokeAgentAsync(ChatCompletionAgent agent, ChatHistory chat, string input) { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + int historyCount = chat.Count; - StringBuilder builder = new(); - await foreach (StreamingChatMessageContent message in agent.InvokeStreamingAsync(chat)) + bool isFirst = false; + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat)) { - if (string.IsNullOrEmpty(message.Content)) + if (string.IsNullOrEmpty(response.Content)) { continue; } - if (builder.Length == 0) + if (!isFirst) { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}:"); + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + isFirst = true; } - Console.WriteLine($"\t > streamed: '{message.Content}'"); - builder.Append(message.Content); + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + if (historyCount <= chat.Count) + { + for (int index = historyCount; index < chat.Count; index++) + { + this.WriteAgentChatMessage(chat[index]); + } } + } + + private void DisplayChatHistory(ChatHistory history) + { + // Display the chat history. 
+ Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); - if (builder.Length > 0) + foreach (ChatMessageContent message in history) { - // Display full response and capture in chat history - Console.WriteLine($"\t > complete: '{builder}'"); - chat.Add(new ChatMessageContent(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name }); + this.WriteAgentChatMessage(message); } } diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs new file mode 100644 index 000000000000..1bcf2adbe758 --- /dev/null +++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Templating.cs @@ -0,0 +1,132 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Microsoft.SemanticKernel.PromptTemplates.Liquid; + +namespace Agents; + +/// +/// Demonstrate parameterized template instruction for . +/// +public class ChatCompletion_Templating(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private readonly static (string Input, string? Style)[] s_inputs = + [ + (Input: "Home cooking is great.", Style: null), + (Input: "Talk about world peace.", Style: "iambic pentameter"), + (Input: "Say something about doing your best.", Style: "e. e. cummings"), + (Input: "What do you think about having fun?", Style: "old school rap") + ]; + + [Fact] + public async Task InvokeAgentWithInstructionsTemplateAsync() + { + // Instruction based template always processed by KernelPromptTemplateFactory + ChatCompletionAgent agent = + new() + { + Kernel = this.CreateKernelWithChatCompletion(), + Instructions = + """ + Write a one verse poem on the requested topic in the style of {{$style}}. + Always state the requested style of the poem. 
+ """, + Arguments = new KernelArguments() + { + {"style", "haiku"} + } + }; + + await InvokeChatCompletionAgentWithTemplateAsync(agent); + } + + [Fact] + public async Task InvokeAgentWithKernelTemplateAsync() + { + // Default factory is KernelPromptTemplateFactory + await InvokeChatCompletionAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the style of {{$style}}. + Always state the requested style of the poem. + """); + } + + [Fact] + public async Task InvokeAgentWithHandlebarsTemplateAsync() + { + await InvokeChatCompletionAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. + """, + HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + new HandlebarsPromptTemplateFactory()); + } + + [Fact] + public async Task InvokeAgentWithLiquidTemplateAsync() + { + await InvokeChatCompletionAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. + """, + LiquidPromptTemplateFactory.LiquidTemplateFormat, + new LiquidPromptTemplateFactory()); + } + + private async Task InvokeChatCompletionAgentWithTemplateAsync( + string instructionTemplate, + string? templateFormat = null, + IPromptTemplateFactory? templateFactory = null) + { + // Define the agent + PromptTemplateConfig templateConfig = + new() + { + Template = instructionTemplate, + TemplateFormat = templateFormat, + }; + ChatCompletionAgent agent = + new(templateConfig, templateFactory) + { + Kernel = this.CreateKernelWithChatCompletion(), + Arguments = new KernelArguments() + { + {"style", "haiku"} + } + }; + + await InvokeChatCompletionAgentWithTemplateAsync(agent); + } + + private async Task InvokeChatCompletionAgentWithTemplateAsync(ChatCompletionAgent agent) + { + ChatHistory chat = []; + + foreach ((string input, string? 
style) in s_inputs) + { + // Add input to chat + ChatMessageContent request = new(AuthorRole.User, input); + chat.Add(request); + this.WriteAgentChatMessage(request); + + KernelArguments? arguments = null; + + if (!string.IsNullOrWhiteSpace(style)) + { + // Override style template parameter + arguments = new() { { "style", style } }; + } + + // Process agent response + await foreach (ChatMessageContent message in agent.InvokeAsync(chat, arguments)) + { + chat.Add(message); + this.WriteAgentChatMessage(message); + } + } + } +} diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs index aae984906ba3..dc9178156509 100644 --- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs +++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs @@ -1,5 +1,4 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; @@ -7,16 +6,16 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Resources; +using ChatResponseFormat = OpenAI.Chat.ChatResponseFormat; + namespace Agents; /// /// Demonstrate usage of and /// to manage execution. /// -public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseTest(output) +public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseAgentsTest(output) { - protected override bool ForceOpenAI => true; - private const string InternalLeaderName = "InternalLeader"; private const string InternalLeaderInstructions = """ @@ -98,8 +97,8 @@ public async Task NestedChatWithAggregatorAgentAsync() { Console.WriteLine($"! 
{Model}"); - OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatCompletionsResponseFormat.JsonObject }; - OpenAIPromptExecutionSettings autoInvokeSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatResponseFormat.CreateJsonObjectFormat() }; + OpenAIPromptExecutionSettings autoInvokeSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; ChatCompletionAgent internalLeaderAgent = CreateAgent(InternalLeaderName, InternalLeaderInstructions); ChatCompletionAgent internalGiftIdeaAgent = CreateAgent(InternalGiftIdeaAgentName, InternalGiftIdeaAgentInstructions); @@ -154,20 +153,20 @@ public async Task NestedChatWithAggregatorAgentAsync() Console.WriteLine(">>>> AGGREGATED CHAT"); Console.WriteLine(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); - await foreach (ChatMessageContent content in chat.GetChatMessagesAsync(personalShopperAgent).Reverse()) + await foreach (ChatMessageContent message in chat.GetChatMessagesAsync(personalShopperAgent).Reverse()) { - Console.WriteLine($">>>> {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(message); } async Task InvokeChatAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(personalShopperAgent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(personalShopperAgent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } Console.WriteLine($"\n# IS COMPLETE: {chat.IsComplete}"); diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs deleted file mode 100644 index 062262fe8a8c..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs +++ /dev/null @@ -1,116 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Experimental.Agents; - -namespace Agents; - -/// -/// Showcase hiearchical Open AI Agent interactions using semantic kernel. -/// -public class Legacy_AgentAuthoring(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - - // Track agents for clean-up - private static readonly List s_agents = []; - - [Fact(Skip = "This test take more than 2 minutes to execute")] - public async Task RunAgentAsync() - { - Console.WriteLine($"======== {nameof(Legacy_AgentAuthoring)} ========"); - try - { - // Initialize the agent with tools - IAgent articleGenerator = await CreateArticleGeneratorAsync(); - - // "Stream" messages as they become available - await foreach (IChatMessage message in articleGenerator.InvokeAsync("Thai food is the best in the world")) - { - Console.WriteLine($"[{message.Id}]"); - Console.WriteLine($"# {message.Role}: {message.Content}"); - } - } - finally - { - await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); - } - } - - [Fact(Skip = "This test take more than 2 minutes to execute")] - public async Task RunAsPluginAsync() - { - Console.WriteLine($"======== {nameof(Legacy_AgentAuthoring)} ========"); - try - { - // Initialize the agent with tools - IAgent articleGenerator = await CreateArticleGeneratorAsync(); - - // Invoke as a plugin function 
- string response = await articleGenerator.AsPlugin().InvokeAsync("Thai food is the best in the world"); - - // Display final result - Console.WriteLine(response); - } - finally - { - await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); - } - } - - private static async Task CreateArticleGeneratorAsync() - { - // Initialize the outline agent - var outlineGenerator = await CreateOutlineGeneratorAsync(); - // Initialize the research agent - var sectionGenerator = await CreateResearchGeneratorAsync(); - - // Initialize agent so that it may be automatically deleted. - return - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .WithInstructions("You write concise opinionated articles that are published online. Use an outline to generate an article with one section of prose for each top-level outline element. Each section is based on research with a maximum of 120 words.") - .WithName("Article Author") - .WithDescription("Author an article on a given topic.") - .WithPlugin(outlineGenerator.AsPlugin()) - .WithPlugin(sectionGenerator.AsPlugin()) - .BuildAsync()); - } - - private static async Task CreateOutlineGeneratorAsync() - { - // Initialize agent so that it may be automatically deleted. - return - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .WithInstructions("Produce an single-level outline (no child elements) based on the given topic with at most 3 sections.") - .WithName("Outline Generator") - .WithDescription("Generate an outline.") - .BuildAsync()); - } - - private static async Task CreateResearchGeneratorAsync() - { - // Initialize agent so that it may be automatically deleted. 
- return - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .WithInstructions("Provide insightful research that supports the given topic based on your knowledge of the outline topic.") - .WithName("Researcher") - .WithDescription("Author research summary.") - .BuildAsync()); - } - - private static IAgent Track(IAgent agent) - { - s_agents.Add(agent); - - return agent; - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs deleted file mode 100644 index 877ba0971710..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs +++ /dev/null @@ -1,109 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Experimental.Agents; - -namespace Agents; - -// ReSharper disable once InconsistentNaming -/// -/// Showcase usage of code_interpreter and retrieval tools. -/// -public sealed class Legacy_AgentCharts(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - private new const bool ForceOpenAI = false; - - /// - /// Create a chart and retrieve by file_id. 
- /// - [Fact(Skip = "Launches external processes")] - public async Task CreateChartAsync() - { - Console.WriteLine("======== Using CodeInterpreter tool ========"); - - var fileService = CreateFileService(); - - var agent = await CreateAgentBuilder().WithCodeInterpreter().BuildAsync(); - - try - { - var thread = await agent.NewThreadAsync(); - - await InvokeAgentAsync( - thread, - "1-first", @" -Display this data using a bar-chart with no summation: - -Banding Brown Pink Yellow Sum -X00000 339 433 126 898 -X00300 48 421 222 691 -X12345 16 395 352 763 -Others 23 373 156 552 -Sum 426 1622 856 2904 -"); - await InvokeAgentAsync(thread, "2-colors", "Can you regenerate this same chart using the category names as the bar colors?"); - await InvokeAgentAsync(thread, "3-line", "Can you regenerate this as a line chart?"); - } - finally - { - await agent.DeleteAsync(); - } - - async Task InvokeAgentAsync(IAgentThread thread, string imageName, string question) - { - await foreach (var message in thread.InvokeAsync(agent, question)) - { - if (message.ContentType == ChatMessageType.Image) - { - var filename = $"{imageName}.jpg"; - var path = Path.Combine(Environment.CurrentDirectory, filename); - Console.WriteLine($"# {message.Role}: {message.Content}"); - Console.WriteLine($"# {message.Role}: {path}"); - var content = await fileService.GetFileContentAsync(message.Content); - await using var outputStream = File.OpenWrite(filename); - await outputStream.WriteAsync(content.Data!.Value); - Process.Start( - new ProcessStartInfo - { - FileName = "cmd.exe", - Arguments = $"/C start {path}" - }); - } - else - { - Console.WriteLine($"# {message.Role}: {message.Content}"); - } - } - - Console.WriteLine(); - } - } - - private static OpenAIFileService CreateFileService() - { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? 
- new OpenAIFileService(TestConfiguration.OpenAI.ApiKey) : - new OpenAIFileService(new Uri(TestConfiguration.AzureOpenAI.Endpoint), apiKey: TestConfiguration.AzureOpenAI.ApiKey); - } - - private static AgentBuilder CreateAgentBuilder() - { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : - new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs deleted file mode 100644 index 53ae0c07662a..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs +++ /dev/null @@ -1,176 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Experimental.Agents; - -namespace Agents; - -/// -/// Showcase complex Open AI Agent collaboration using semantic kernel. -/// -public class Legacy_AgentCollaboration(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-turbo-preview"; - - /// - /// Set this to 'true' to target OpenAI instead of Azure OpenAI. - /// - private const bool UseOpenAI = false; - - // Track agents for clean-up - private static readonly List s_agents = []; - - /// - /// Show how two agents are able to collaborate as agents on a single thread. - /// - [Fact(Skip = "This test take more than 5 minutes to execute")] - public async Task RunCollaborationAsync() - { - Console.WriteLine($"======== Example72:Collaboration:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========"); - - IAgentThread? 
thread = null; - try - { - // Create copy-writer agent to generate ideas - var copyWriter = await CreateCopyWriterAsync(); - // Create art-director agent to review ideas, provide feedback and final approval - var artDirector = await CreateArtDirectorAsync(); - - // Create collaboration thread to which both agents add messages. - thread = await copyWriter.NewThreadAsync(); - - // Add the user message - var messageUser = await thread.AddUserMessageAsync("concept: maps made out of egg cartons."); - DisplayMessage(messageUser); - - bool isComplete = false; - do - { - // Initiate copy-writer input - var agentMessages = await thread.InvokeAsync(copyWriter).ToArrayAsync(); - DisplayMessages(agentMessages, copyWriter); - - // Initiate art-director input - agentMessages = await thread.InvokeAsync(artDirector).ToArrayAsync(); - DisplayMessages(agentMessages, artDirector); - - // Evaluate if goal is met. - if (agentMessages.First().Content.Contains("PRINT IT", StringComparison.OrdinalIgnoreCase)) - { - isComplete = true; - } - } - while (!isComplete); - } - finally - { - // Clean-up (storage costs $) - await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); - } - } - - /// - /// Show how agents can collaborate as agents using the plug-in model. - /// - /// - /// While this may achieve an equivalent result to , - /// it is not using shared thread state for agent interaction. - /// - [Fact(Skip = "This test take more than 2 minutes to execute")] - public async Task RunAsPluginsAsync() - { - Console.WriteLine($"======== Example72:AsPlugins:{(UseOpenAI ? 
"OpenAI" : "AzureAI")} ========"); - - try - { - // Create copy-writer agent to generate ideas - var copyWriter = await CreateCopyWriterAsync(); - // Create art-director agent to review ideas, provide feedback and final approval - var artDirector = await CreateArtDirectorAsync(); - - // Create coordinator agent to oversee collaboration - var coordinator = - Track( - await CreateAgentBuilder() - .WithInstructions("Reply the provided concept and have the copy-writer generate an marketing idea (copy). Then have the art-director reply to the copy-writer with a review of the copy. Always include the source copy in any message. Always include the art-director comments when interacting with the copy-writer. Coordinate the repeated replies between the copy-writer and art-director until the art-director approves the copy.") - .WithPlugin(copyWriter.AsPlugin()) - .WithPlugin(artDirector.AsPlugin()) - .BuildAsync()); - - // Invoke as a plugin function - var response = await coordinator.AsPlugin().InvokeAsync("concept: maps made out of egg cartons."); - - // Display final result - Console.WriteLine(response); - } - finally - { - // Clean-up (storage costs $) - await Task.WhenAll(s_agents.Select(a => a.DeleteAsync())); - } - } - - private static async Task CreateCopyWriterAsync(IAgent? agent = null) - { - return - Track( - await CreateAgentBuilder() - .WithInstructions("You are a copywriter with ten years of experience and are known for brevity and a dry humor. You're laser focused on the goal at hand. Don't waste time with chit chat. The goal is to refine and decide on the single best copy as an expert in the field. 
Consider suggestions when refining an idea.") - .WithName("Copywriter") - .WithDescription("Copywriter") - .WithPlugin(agent?.AsPlugin()) - .BuildAsync()); - } - - private static async Task CreateArtDirectorAsync() - { - return - Track( - await CreateAgentBuilder() - .WithInstructions("You are an art director who has opinions about copywriting born of a love for David Ogilvy. The goal is to determine is the given copy is acceptable to print, even if it isn't perfect. If not, provide insight on how to refine suggested copy without example. Always respond to the most recent message by evaluating and providing critique without example. Always repeat the copy at the beginning. If copy is acceptable and meets your criteria, say: PRINT IT.") - .WithName("Art Director") - .WithDescription("Art Director") - .BuildAsync()); - } - - private static AgentBuilder CreateAgentBuilder() - { - var builder = new AgentBuilder(); - - return - UseOpenAI ? - builder.WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : - builder.WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); - } - - private void DisplayMessages(IEnumerable messages, IAgent? agent = null) - { - foreach (var message in messages) - { - DisplayMessage(message, agent); - } - } - - private void DisplayMessage(IChatMessage message, IAgent? 
agent = null) - { - Console.WriteLine($"[{message.Id}]"); - if (agent is not null) - { - Console.WriteLine($"# {message.Role}: ({agent.Name}) {message.Content}"); - } - else - { - Console.WriteLine($"# {message.Role}: {message.Content}"); - } - } - - private static IAgent Track(IAgent agent) - { - s_agents.Add(agent); - - return agent; - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs deleted file mode 100644 index 86dacb9c256d..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs +++ /dev/null @@ -1,100 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents; -using Plugins; -using Resources; - -namespace Agents; - -/// -/// Showcase complex Open AI Agent interactions using semantic kernel. -/// -public class Legacy_AgentDelegation(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; - - // Track agents for clean-up - private static readonly List s_agents = []; - - /// - /// Show how to combine coordinate multiple agents. - /// - [Fact] - public async Task RunAsync() - { - Console.WriteLine("======== Example71_AgentDelegation ========"); - - if (TestConfiguration.OpenAI.ApiKey is null) - { - Console.WriteLine("OpenAI apiKey not found. Skipping example."); - return; - } - - IAgentThread? 
thread = null; - - try - { - var plugin = KernelPluginFactory.CreateFromType(); - var menuAgent = - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) - .WithDescription("Answer questions about how the menu uses the tool.") - .WithPlugin(plugin) - .BuildAsync()); - - var parrotAgent = - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) - .BuildAsync()); - - var toolAgent = - Track( - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml")) - .WithPlugin(parrotAgent.AsPlugin()) - .WithPlugin(menuAgent.AsPlugin()) - .BuildAsync()); - - var messages = new string[] - { - "What's on the menu?", - "Can you talk like pirate?", - "Thank you", - }; - - thread = await toolAgent.NewThreadAsync(); - foreach (var response in messages.Select(m => thread.InvokeAsync(toolAgent, m))) - { - await foreach (var message in response) - { - Console.WriteLine($"[{message.Id}]"); - Console.WriteLine($"# {message.Role}: {message.Content}"); - } - } - } - finally - { - // Clean-up (storage costs $) - await Task.WhenAll( - thread?.DeleteAsync() ?? Task.CompletedTask, - Task.WhenAll(s_agents.Select(a => a.DeleteAsync()))); - } - } - - private static IAgent Track(IAgent agent) - { - s_agents.Add(agent); - - return agent; - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs deleted file mode 100644 index 66d93ecc88d9..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs +++ /dev/null @@ -1,190 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Experimental.Agents; -using Resources; - -namespace Agents; - -// ReSharper disable once InconsistentNaming -/// -/// Showcase usage of code_interpreter and retrieval tools. -/// -public sealed class Legacy_AgentTools(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and parallel function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - /// - /// NOTE: Retrieval tools is not currently available on Azure. - /// - private new const bool ForceOpenAI = true; - - // Track agents for clean-up - private readonly List _agents = []; - - /// - /// Show how to utilize code_interpreter tool. - /// - [Fact] - public async Task RunCodeInterpreterToolAsync() - { - Console.WriteLine("======== Using CodeInterpreter tool ========"); - - var builder = CreateAgentBuilder().WithInstructions("Write only code to solve the given problem without comment."); - - try - { - var defaultAgent = Track(await builder.BuildAsync()); - - var codeInterpreterAgent = Track(await builder.WithCodeInterpreter().BuildAsync()); - - await ChatAsync( - defaultAgent, - codeInterpreterAgent, - fileId: null, - "What is the solution to `3x + 2 = 14`?", - "What is the fibinacci sequence until 101?"); - } - finally - { - await Task.WhenAll(this._agents.Select(a => a.DeleteAsync())); - } - } - - /// - /// Show how to utilize retrieval tool. - /// - [Fact] - public async Task RunRetrievalToolAsync() - { - // Set to "true" to pass fileId via thread invocation. - // Set to "false" to associate fileId with agent definition. 
- const bool PassFileOnRequest = false; - - Console.WriteLine("======== Using Retrieval tool ========"); - - if (TestConfiguration.OpenAI.ApiKey is null) - { - Console.WriteLine("OpenAI apiKey not found. Skipping example."); - return; - } - - Kernel kernel = CreateFileEnabledKernel(); - var fileService = kernel.GetRequiredService(); - var result = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"), - new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants)); - - var fileId = result.Id; - Console.WriteLine($"! {fileId}"); - - var defaultAgent = Track(await CreateAgentBuilder().BuildAsync()); - - var retrievalAgent = Track(await CreateAgentBuilder().WithRetrieval().BuildAsync()); - - if (!PassFileOnRequest) - { - await retrievalAgent.AddFileAsync(fileId); - } - - try - { - await ChatAsync( - defaultAgent, - retrievalAgent, - PassFileOnRequest ? fileId : null, - "Where did sam go?", - "When does the flight leave Seattle?", - "What is the hotel contact info at the destination?"); - } - finally - { - await Task.WhenAll(this._agents.Select(a => a.DeleteAsync()).Append(fileService.DeleteFileAsync(fileId))); - } - } - - /// - /// Common chat loop used for: RunCodeInterpreterToolAsync and RunRetrievalToolAsync. - /// Processes each question for both "default" and "enabled" agents. - /// - private async Task ChatAsync( - IAgent defaultAgent, - IAgent enabledAgent, - string? fileId = null, - params string[] questions) - { - string[]? 
fileIds = null; - if (fileId is not null) - { - fileIds = [fileId]; - } - - foreach (var question in questions) - { - Console.WriteLine("\nDEFAULT AGENT:"); - await InvokeAgentAsync(defaultAgent, question); - - Console.WriteLine("\nTOOL ENABLED AGENT:"); - await InvokeAgentAsync(enabledAgent, question); - } - - async Task InvokeAgentAsync(IAgent agent, string question) - { - await foreach (var message in agent.InvokeAsync(question, null, fileIds)) - { - string content = message.Content; - foreach (var annotation in message.Annotations) - { - content = content.Replace(annotation.Label, string.Empty, StringComparison.Ordinal); - } - - Console.WriteLine($"# {message.Role}: {content}"); - - if (message.Annotations.Count > 0) - { - Console.WriteLine("\n# files:"); - foreach (var annotation in message.Annotations) - { - Console.WriteLine($"* {annotation.FileId}"); - } - } - } - - Console.WriteLine(); - } - } - - private static Kernel CreateFileEnabledKernel() - { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - Kernel.CreateBuilder().AddOpenAIFiles(TestConfiguration.OpenAI.ApiKey).Build() : - Kernel.CreateBuilder().AddAzureOpenAIFiles(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey).Build(); - } - - private static AgentBuilder CreateAgentBuilder() - { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? 
- new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : - new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); - } - - private IAgent Track(IAgent agent) - { - this._agents.Add(agent); - - return agent; - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs b/dotnet/samples/Concepts/Agents/Legacy_Agents.cs deleted file mode 100644 index 5af10987bb3a..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs +++ /dev/null @@ -1,197 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents; -using Plugins; -using Resources; - -namespace Agents; - -/// -/// Showcase Open AI Agent integration with semantic kernel: -/// https://platform.openai.com/docs/api-reference/agents -/// -public class Legacy_Agents(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Specific model is required that supports agents and function calling. - /// Currently this is limited to Open AI hosted services. - /// - private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106"; - - /// - /// Flag to force usage of OpenAI configuration if both - /// and are defined. - /// If 'false', Azure takes precedence. - /// - private new const bool ForceOpenAI = false; - - /// - /// Chat using the "Parrot" agent. - /// Tools/functions: None - /// - [Fact] - public Task RunSimpleChatAsync() - { - Console.WriteLine("======== Run:SimpleChat ========"); - - // Call the common chat-loop - return ChatAsync( - "Agents.ParrotAgent.yaml", // Defined under ./Resources/Agents - plugin: null, // No plugin - arguments: new KernelArguments { { "count", 3 } }, - "Fortune favors the bold.", - "I came, I saw, I conquered.", - "Practice makes perfect."); - } - - /// - /// Chat using the "Tool" agent and a method function. 
- /// Tools/functions: MenuPlugin - /// - [Fact] - public async Task RunWithMethodFunctionsAsync() - { - Console.WriteLine("======== Run:WithMethodFunctions ========"); - - LegacyMenuPlugin menuApi = new(); - KernelPlugin plugin = KernelPluginFactory.CreateFromObject(menuApi); - - // Call the common chat-loop - await ChatAsync( - "Agents.ToolAgent.yaml", // Defined under ./Resources/Agents - plugin, - arguments: new() { { LegacyMenuPlugin.CorrelationIdArgument, 3.141592653 } }, - "Hello", - "What is the special soup?", - "What is the special drink?", - "Do you have enough soup for 5 orders?", - "Thank you!"); - - Console.WriteLine("\nCorrelation Ids:"); - foreach (string correlationId in menuApi.CorrelationIds) - { - Console.WriteLine($"- {correlationId}"); - } - } - - /// - /// Chat using the "Tool" agent and a prompt function. - /// Tools/functions: spellChecker prompt function - /// - [Fact] - public Task RunWithPromptFunctionsAsync() - { - Console.WriteLine("======== WithPromptFunctions ========"); - - // Create a prompt function. - var function = KernelFunctionFactory.CreateFromPrompt( - "Correct any misspelling or gramatical errors provided in input: {{$input}}", - functionName: "spellChecker", - description: "Correct the spelling for the user input."); - - var plugin = KernelPluginFactory.CreateFromFunctions("spelling", "Spelling functions", [function]); - - // Call the common chat-loop - return ChatAsync( - "Agents.ToolAgent.yaml", // Defined under ./Resources/Agents - plugin, - arguments: null, - "Hello", - "Is this spelled correctly: exercize", - "What is the special soup?", - "Thank you!"); - } - - /// - /// Invoke agent just like any other . - /// - [Fact] - public async Task RunAsFunctionAsync() - { - Console.WriteLine("======== Run:AsFunction ========"); - - // Create parrot agent, same as the other cases. 
- var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) - .FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml")) - .BuildAsync(); - - try - { - // Invoke agent plugin. - var response = await agent.AsPlugin().InvokeAsync("Practice makes perfect.", new KernelArguments { { "count", 2 } }); - - // Display result. - Console.WriteLine(response ?? $"No response from agent: {agent.Id}"); - } - finally - { - // Clean-up (storage costs $) - await agent.DeleteAsync(); - } - } - - /// - /// Common chat loop used for: RunSimpleChatAsync, RunWithMethodFunctionsAsync, and RunWithPromptFunctionsAsync. - /// 1. Reads agent definition from"resourcePath" parameter. - /// 2. Initializes agent with definition and the specified "plugin". - /// 3. Display the agent identifier - /// 4. Create a chat-thread - /// 5. Process the provided "messages" on the chat-thread - /// - private async Task ChatAsync( - string resourcePath, - KernelPlugin? plugin = null, - KernelArguments? arguments = null, - params string[] messages) - { - // Read agent resource - var definition = EmbeddedResource.Read(resourcePath); - - // Create agent - var agent = - await CreateAgentBuilder() - .FromTemplate(definition) - .WithPlugin(plugin) - .BuildAsync(); - - // Create chat thread. Note: Thread is not bound to a single agent. - var thread = await agent.NewThreadAsync(); - - // Enable provided arguments to be passed to function-calling - thread.EnableFunctionArgumentPassThrough = true; - - try - { - // Display agent identifier. - Console.WriteLine($"[{agent.Id}]"); - - // Process each user message and agent response. 
- foreach (var response in messages.Select(m => thread.InvokeAsync(agent, m, arguments))) - { - await foreach (var message in response) - { - Console.WriteLine($"[{message.Id}]"); - Console.WriteLine($"# {message.Role}: {message.Content}"); - } - } - } - finally - { - // Clean-up (storage costs $) - await Task.WhenAll( - thread?.DeleteAsync() ?? Task.CompletedTask, - agent.DeleteAsync()); - } - } - - private static AgentBuilder CreateAgentBuilder() - { - return - ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ? - new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) : - new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey); - } -} diff --git a/dotnet/samples/Concepts/Agents/Legacy_ChatCompletionAgent.cs b/dotnet/samples/Concepts/Agents/Legacy_ChatCompletionAgent.cs deleted file mode 100644 index f379adc2e4a7..000000000000 --- a/dotnet/samples/Concepts/Agents/Legacy_ChatCompletionAgent.cs +++ /dev/null @@ -1,146 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Kusto.Cloud.Platform.Utils; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Experimental.Agents; - -namespace Agents; - -public class Legacy_ChatCompletionAgent(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// This example demonstrates a chat with the chat completion agent that utilizes the SK ChatCompletion API to communicate with LLM. 
- /// - [Fact] - public async Task ChatWithAgentAsync() - { - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - .Build(); - - var agent = new ChatCompletionAgent( - kernel, - instructions: "You act as a professional financial adviser. However, clients may not know the terminology, so please provide a simple explanation.", - new OpenAIPromptExecutionSettings - { - MaxTokens = 500, - Temperature = 0.7, - TopP = 1.0, - PresencePenalty = 0.0, - FrequencyPenalty = 0.0, - } - ); - - var prompt = PrintPrompt("I need help with my investment portfolio. Please guide me."); - PrintConversation(await agent.InvokeAsync([new ChatMessageContent(AuthorRole.User, prompt)])); - } - - /// - /// This example demonstrates a round-robin chat between two chat completion agents using the TurnBasedChat collaboration experience. - /// - [Fact] - public async Task TurnBasedAgentsChatAsync() - { - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIChatCompletion( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId) - .Build(); - - var settings = new OpenAIPromptExecutionSettings - { - MaxTokens = 1500, - Temperature = 0.7, - TopP = 1.0, - PresencePenalty = 0.0, - FrequencyPenalty = 0.0, - }; - - var fitnessTrainer = new ChatCompletionAgent( - kernel, - instructions: "As a fitness trainer, suggest workout routines, and exercises for beginners. " + - "You are not a stress management expert, so refrain from recommending stress management strategies. " + - "Collaborate with the stress management expert to create a holistic wellness plan." 
+ - "Always incorporate stress reduction techniques provided by the stress management expert into the fitness plan." + - "Always include your role at the beginning of each response, such as 'As a fitness trainer.", - settings - ); - - var stressManagementExpert = new ChatCompletionAgent( - kernel, - instructions: "As a stress management expert, provide guidance on stress reduction strategies. " + - "Collaborate with the fitness trainer to create a simple and holistic wellness plan." + - "You are not a fitness expert; therefore, avoid recommending fitness exercises." + - "If the plan is not aligned with recommended stress reduction plan, ask the fitness trainer to rework it to incorporate recommended stress reduction techniques. " + - "Only you can stop the conversation by saying WELLNESS_PLAN_COMPLETE if suggested fitness plan is good." + - "Always include your role at the beginning of each response such as 'As a stress management expert.", - settings - ); - - var chat = new TurnBasedChat([fitnessTrainer, stressManagementExpert], (chatHistory, replies, turn) => - turn >= 10 || // Limit the number of turns to 10 - replies.Any( - message => message.Role == AuthorRole.Assistant && - message.Content!.Contains("WELLNESS_PLAN_COMPLETE", StringComparison.InvariantCulture))); // Exit when the message "WELLNESS_PLAN_COMPLETE" received from agent - - var prompt = "I need help creating a simple wellness plan for a beginner. 
Please guide me."; - PrintConversation(await chat.SendMessageAsync(prompt)); - } - - private string PrintPrompt(string prompt) - { - Console.WriteLine($"Prompt: {prompt}"); - - return prompt; - } - - private void PrintConversation(IEnumerable messages) - { - foreach (var message in messages) - { - Console.WriteLine($"------------------------------- {message.Role} ------------------------------"); - Console.WriteLine(message.Content); - Console.WriteLine(); - } - - Console.WriteLine(); - } - - private sealed class TurnBasedChat(IEnumerable agents, Func, int, bool> exitCondition) - { - public async Task> SendMessageAsync(string message, CancellationToken cancellationToken = default) - { - var chat = new ChatHistory(); - chat.AddUserMessage(message); - - IReadOnlyList result; - - var turn = 0; - - do - { - var agent = this._agents[turn % this._agents.Length]; - - result = await agent.InvokeAsync(chat, cancellationToken); - - chat.AddRange(result); - - turn++; - } - while (!this._exitCondition(chat, result, turn)); - - return chat; - } - - private readonly ChatCompletionAgent[] _agents = agents.ToArray(); - private readonly Func, int, bool> _exitCondition = exitCondition; - } -} diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs index d3a894dd6c8e..159441147f77 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs @@ -10,7 +10,7 @@ namespace Agents; /// Demonstrate that two different agent types are able to participate in the same conversation. /// In this case a and participate. 
/// -public class MixedChat_Agents(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -46,14 +46,14 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync() OpenAIAssistantAgent agentWriter = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - definition: new() + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) { Instructions = CopyWriterInstructions, Name = CopyWriterName, - ModelId = this.Model, - }); + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); // Create a chat for agent interaction. AgentGroupChat chat = @@ -76,16 +76,16 @@ await OpenAIAssistantAgent.CreateAsync( }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs index 5d96de68da72..4f12657e0d7a 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs @@ -1,10 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; using Resources; namespace Agents; @@ -13,38 +12,36 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces file output. /// -public class MixedChat_Files(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. 
- /// - protected override bool ForceOpenAI => true; - private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language."; [Fact] public async Task AnalyzeFileAndGenerateReportAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); + + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("30-user-context.txt"), mimeType: "text/plain"), - new OpenAIFileUploadExecutionSettings("30-user-context.txt", OpenAIFilePurpose.Assistants)); + OpenAIFile uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")), + "30-user-context.txt", + FileUploadPurpose.Assistants); Console.WriteLine(this.ApiKey); // Define the agents OpenAIAssistantAgent analystAgent = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + definition: new OpenAIAssistantDefinition(this.Model) { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file with assistant - }); + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); ChatCompletionAgent summaryAgent = new() @@ -70,7 +67,7 @@ Create a tab delimited file report of the ordered (descending) frequency distrib finally { await analystAgent.DeleteAsync(); - await fileService.DeleteFileAsync(uploadFile.Id); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. @@ -78,21 +75,15 @@ async Task InvokeAgentAsync(Agent agent, string? 
input = null) { if (!string.IsNullOrWhiteSpace(input)) { + ChatMessageContent message = new(AuthorRole.User, input); chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"\n# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - - foreach (AnnotationContent annotation in content.Items.OfType()) - { - Console.WriteLine($"\t* '{annotation.Quote}' => {annotation.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? []; - Console.WriteLine($"\n{Encoding.Default.GetString(byteContent)}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs index 385577573ac6..03f047c756bd 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs @@ -3,7 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; namespace Agents; @@ -11,13 +11,8 @@ namespace Agents; /// Demonstrate agent interacts with /// when it produces image output. /// -public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. 
- /// - protected override bool ForceOpenAI => true; - private const string AnalystName = "Analyst"; private const string AnalystInstructions = "Create charts as requested without explanation."; @@ -27,20 +22,22 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output) [Fact] public async Task AnalyzeDataAndGenerateChartAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); + + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); // Define the agents OpenAIAssistantAgent analystAgent = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + definition: new OpenAIAssistantDefinition(this.Model) { Instructions = AnalystInstructions, Name = AnalystName, EnableCodeInterpreter = true, - ModelId = this.Model, - }); + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); ChatCompletionAgent summaryAgent = new() @@ -86,26 +83,15 @@ async Task InvokeAgentAsync(Agent agent, string? input = null) { if (!string.IsNullOrWhiteSpace(input)) { - chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } - - foreach (FileReferenceContent fileReference in message.Items.OfType()) - { - Console.WriteLine($"\t* Generated image - @{fileReference.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(fileReference.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? 
[]; - string filePath = Path.ChangeExtension(Path.GetTempFileName(), ".png"); - await File.WriteAllBytesAsync($"{filePath}.png", byteContent); - Console.WriteLine($"\t* Local path - {filePath}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs index 92aa8a9ce9d4..7c9a2490d3e0 100644 --- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs +++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs @@ -3,14 +3,13 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; namespace Agents; /// /// Demonstrate the use of . /// -public class MixedChat_Reset(ITestOutputHelper output) : BaseTest(output) +public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output) { private const string AgentInstructions = """ @@ -21,19 +20,18 @@ The user may either provide information or query on information previously provi [Fact] public async Task ResetChatAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); // Define the agents OpenAIAssistantAgent assistantAgent = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + definition: new OpenAIAssistantDefinition(this.Model) { Name = nameof(OpenAIAssistantAgent), Instructions = AgentInstructions, - ModelId = this.Model, - }); + }, + kernel: new Kernel()); ChatCompletionAgent chatAgent = new() @@ -74,16 +72,14 @@ async Task InvokeAgentAsync(Agent agent, string? 
input = null) { if (!string.IsNullOrWhiteSpace(input)) { - chat.AddChatMessage(new(AuthorRole.User, input)); - Console.WriteLine($"\n# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); } - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } + this.WriteAgentChatMessage(response); } } } diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs new file mode 100644 index 000000000000..c9364bc2b2a9 --- /dev/null +++ b/dotnet/samples/Concepts/Agents/MixedChat_Streaming.cs @@ -0,0 +1,122 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.Chat; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Agents; + +/// +/// Demonstrate consuming "streaming" message for and +/// both participating in an . +/// +public class MixedChat_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string ReviewerName = "ArtDirector"; + private const string ReviewerInstructions = + """ + You are an art director who has opinions about copywriting born of a love for David Ogilvy. + The goal is to determine is the given copy is acceptable to print. + If so, state that it is approved. + If not, provide insight on how to refine suggested copy without example. + """; + + private const string CopyWriterName = "CopyWriter"; + private const string CopyWriterInstructions = + """ + You are a copywriter with ten years of experience and are known for brevity and a dry humor. 
+ The goal is to refine and decide on the single best copy as an expert in the field. + Only provide a single proposal per response. + You're laser focused on the goal at hand. + Don't waste time with chit chat. + Consider suggestions when refining an idea. + """; + + [Fact] + public async Task UseStreamingAgentChatAsync() + { + // Define the agents: one of each type + ChatCompletionAgent agentReviewer = + new() + { + Instructions = ReviewerInstructions, + Name = ReviewerName, + Kernel = this.CreateKernelWithChatCompletion(), + }; + + OpenAIAssistantAgent agentWriter = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = CopyWriterInstructions, + Name = CopyWriterName, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); + + // Create a chat for agent interaction. + AgentGroupChat chat = + new(agentWriter, agentReviewer) + { + ExecutionSettings = + new() + { + // Here a TerminationStrategy subclass is used that will terminate when + // an assistant message contains the term "approve". + TerminationStrategy = + new ApprovalTerminationStrategy() + { + // Only the art-director may approve. + Agents = [agentReviewer], + // Limit total number of turns + MaximumIterations = 10, + } + } + }; + + // Invoke chat and display messages. + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); + + string lastAgent = string.Empty; + await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync()) + { + if (string.IsNullOrEmpty(response.Content)) + { + continue; + } + + if (!lastAgent.Equals(response.AuthorName, StringComparison.Ordinal)) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + lastAgent = response.AuthorName ?? 
string.Empty; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + // Display the chat history. + Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); + + ChatMessageContent[] history = await chat.GetChatMessagesAsync().Reverse().ToArrayAsync(); + + for (int index = 0; index < history.Length; index++) + { + this.WriteAgentChatMessage(history[index]); + } + + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); + } + + private sealed class ApprovalTerminationStrategy : TerminationStrategy + { + // Terminate when the final message contains the term "approve" + protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken) + => Task.FromResult(history[history.Count - 1].Content?.Contains("approve", StringComparison.OrdinalIgnoreCase) ?? false); + } +} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs index ef5ba80154fa..807d03ecc130 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs @@ -3,6 +3,7 @@ using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; namespace Agents; @@ -10,31 +11,30 @@ namespace Agents; /// Demonstrate using code-interpreter with to /// produce image content displays the requested charts. /// -public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseTest(output) +public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target Open AI services. 
- /// - protected override bool ForceOpenAI => true; - private const string AgentName = "ChartMaker"; private const string AgentInstructions = "Create charts as requested without explanation."; [Fact] public async Task GenerateChartWithOpenAIAssistantAgentAsync() { + OpenAIClientProvider provider = this.GetClientProvider(); + + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + definition: new OpenAIAssistantDefinition(this.Model) { Instructions = AgentInstructions, Name = AgentName, EnableCodeInterpreter = true, - ModelId = this.Model, - }); + Metadata = AssistantSampleMetadata, + }, + kernel: new()); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -55,6 +55,7 @@ Sum 426 1622 856 2904 """); await InvokeAgentAsync("Can you regenerate this same chart using the category names as the bar colors?"); + await InvokeAgentAsync("Perfect, can you regenerate this as a line chart?"); } finally { @@ -64,21 +65,14 @@ Sum 426 1622 856 2904 // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent message in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - if (!string.IsNullOrWhiteSpace(message.Content)) - { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'"); - } - - foreach (FileReferenceContent fileReference in message.Items.OfType()) - { - Console.WriteLine($"# {message.Role} - {message.AuthorName ?? 
"*"}: @{fileReference.FileId}"); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseImageAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs deleted file mode 100644 index 75b237489025..000000000000 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs +++ /dev/null @@ -1,56 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Agents; - -/// -/// Demonstrate using code-interpreter on . -/// -public class OpenAIAssistant_CodeInterpreter(ITestOutputHelper output) : BaseTest(output) -{ - protected override bool ForceOpenAI => true; - - [Fact] - public async Task UseCodeInterpreterToolWithOpenAIAssistantAgentAsync() - { - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() - { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, - }); - - // Create a chat for agent interaction. - AgentGroupChat chat = new(); - - // Respond to user input - try - { - await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?"); - } - finally - { - await agent.DeleteAsync(); - } - - // Local function to invoke agent and display the conversation messages. - async Task InvokeAgentAsync(string input) - { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (var content in chat.InvokeAsync(agent)) - { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); - } - } - } -} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs index 8e64006ee9d3..a0d48bf94eaa 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs @@ -1,10 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Files; using Resources; namespace Agents; @@ -12,36 +11,32 @@ namespace Agents; /// /// Demonstrate using code-interpreter to manipulate and generate csv files with . /// -public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseTest(output) +public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output) { - /// - /// Target OpenAI services. 
- /// - protected override bool ForceOpenAI => true; - [Fact] public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync() { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); + OpenAIClientProvider provider = this.GetClientProvider(); - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync( - new BinaryContent(await EmbeddedResource.ReadAllAsync("sales.csv"), mimeType: "text/plain"), - new OpenAIFileUploadExecutionSettings("sales.csv", OpenAIFilePurpose.Assistants)); + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); - Console.WriteLine(this.ApiKey); + OpenAIFile uploadFile = + await fileClient.UploadFileAsync( + new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!), + "sales.csv", + FileUploadPurpose.Assistants); // Define the agent OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() + provider, + definition: new OpenAIAssistantDefinition(this.Model) { - EnableCodeInterpreter = true, // Enable code-interpreter - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file - }); + EnableCodeInterpreter = true, + CodeInterpreterFileIds = [uploadFile.Id], + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); // Create a chat for agent interaction. AgentGroupChat chat = new(); @@ -56,27 +51,20 @@ await OpenAIAssistantAgent.CreateAsync( finally { await agent.DeleteAsync(); - await fileService.DeleteFileAsync(uploadFile.Id); + await fileClient.DeleteFileAsync(uploadFile.Id); } // Local function to invoke agent and display the conversation messages. 
async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(new(AuthorRole.User, input)); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - - foreach (AnnotationContent annotation in content.Items.OfType()) - { - Console.WriteLine($"\n* '{annotation.Quote}' => {annotation.FileId}"); - BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!); - byte[] byteContent = fileContent.Data?.ToArray() ?? []; - Console.WriteLine(Encoding.Default.GetString(byteContent)); - } + this.WriteAgentChatMessage(response); + await this.DownloadResponseContentAsync(fileClient, response); } } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs index 7537f53da726..a8f31622c753 100644 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs @@ -18,6 +18,7 @@ public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(ou [Fact] public async Task UploadAndRetrieveFilesAsync() { +#pragma warning disable CS0618 // Type or member is obsolete OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); BinaryContent[] files = [ @@ -27,7 +28,7 @@ public async Task UploadAndRetrieveFilesAsync() new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" } ]; - var fileContents = new Dictionary(); + Dictionary fileContents = new(); foreach (BinaryContent file in files) { 
OpenAIFileReference result = await fileService.UploadContentAsync(file, new(file.InnerContent!.ToString()!, OpenAIFilePurpose.FineTune)); @@ -48,7 +49,7 @@ public async Task UploadAndRetrieveFilesAsync() string? fileName = fileContents[fileReference.Id].InnerContent!.ToString(); ReadOnlyMemory data = content.Data ?? new(); - var typedContent = mimeType switch + BinaryContent typedContent = mimeType switch { "image/jpeg" => new ImageContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }, "audio/wav" => new AudioContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }, @@ -62,5 +63,7 @@ public async Task UploadAndRetrieveFilesAsync() // Delete the test file remotely await fileService.DeleteFileAsync(fileReference.Id); } + +#pragma warning restore CS0618 // Type or member is obsolete } } diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs deleted file mode 100644 index 6f30b6974ff7..000000000000 --- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs +++ /dev/null @@ -1,70 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Resources; - -namespace Agents; - -/// -/// Demonstrate using retrieval on . -/// -public class OpenAIAssistant_Retrieval(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Retrieval tool not supported on Azure OpenAI. 
- /// - protected override bool ForceOpenAI => true; - - [Fact] - public async Task UseRetrievalToolWithOpenAIAssistantAgentAsync() - { - OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey); - - OpenAIFileReference uploadFile = - await fileService.UploadContentAsync(new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"), - new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants)); - - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() - { - EnableRetrieval = true, // Enable retrieval - ModelId = this.Model, - FileIds = [uploadFile.Id] // Associate uploaded file - }); - - // Create a chat for agent interaction. - AgentGroupChat chat = new(); - - // Respond to user input - try - { - await InvokeAgentAsync("Where did sam go?"); - await InvokeAgentAsync("When does the flight leave Seattle?"); - await InvokeAgentAsync("What is the hotel contact info at the destination?"); - } - finally - { - await agent.DeleteAsync(); - } - - // Local function to invoke agent and display the conversation messages. - async Task InvokeAgentAsync(string input) - { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) - { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - } - } - } -} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs new file mode 100644 index 000000000000..e394b8c49dad --- /dev/null +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Streaming.cs @@ -0,0 +1,182 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System.ComponentModel; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Agents; + +/// +/// Demonstrate consuming "streaming" message for . +/// +public class OpenAIAssistant_Streaming(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseStreamingAssistantAgentAsync() + { + const string AgentName = "Parrot"; + const string AgentInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + await InvokeAgentAsync(agent, threadId, "Fortune favors the bold."); + await InvokeAgentAsync(agent, threadId, "I came, I saw, I conquered."); + await InvokeAgentAsync(agent, threadId, "Practice makes perfect."); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, threadId); + } + + [Fact] + public async Task UseStreamingAssistantAgentWithPluginAsync() + { + const string AgentName = "Host"; + const string AgentInstructions = "Answer questions about the menu."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + Metadata = AssistantSampleMetadata, + }); + + // Initialize plugin and add to the agent's Kernel (same as direct 
Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + await InvokeAgentAsync(agent, threadId, "What is the special soup?"); + await InvokeAgentAsync(agent, threadId, "What is the special drink?"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, threadId); + } + + [Fact] + public async Task UseStreamingAssistantWithCodeInterpreterAsync() + { + const string AgentName = "MathGuy"; + const string AgentInstructions = "Solve math problems with code."; + + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + kernel: new(), + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = AgentInstructions, + Name = AgentName, + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + await InvokeAgentAsync(agent, threadId, "Is 191 a prime number?"); + await InvokeAgentAsync(agent, threadId, "Determine the values in the Fibonacci sequence that that are less then the value of 101"); + + // Output the entire chat history + await DisplayChatHistoryAsync(agent, threadId); + } + + // Local function to invoke agent and display the conversation messages. 
+ private async Task InvokeAgentAsync(OpenAIAssistantAgent agent, string threadId, string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + ChatHistory history = []; + + bool isFirst = false; + bool isCode = false; + await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(threadId, messages: history)) + { + if (string.IsNullOrEmpty(response.Content)) + { + continue; + } + + // Differentiate between assistant and tool messages + if (isCode != (response.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false)) + { + isFirst = false; + isCode = !isCode; + } + + if (!isFirst) + { + Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:"); + isFirst = true; + } + + Console.WriteLine($"\t > streamed: '{response.Content}'"); + } + + foreach (ChatMessageContent content in history) + { + this.WriteAgentChatMessage(content); + } + } + + private async Task DisplayChatHistoryAsync(OpenAIAssistantAgent agent, string threadId) + { + Console.WriteLine("================================"); + Console.WriteLine("CHAT HISTORY"); + Console.WriteLine("================================"); + + ChatMessageContent[] messages = await agent.GetThreadMessagesAsync(threadId).ToArrayAsync(); + for (int index = messages.Length - 1; index >= 0; --index) + { + this.WriteAgentChatMessage(messages[index]); + } + } + + public sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() + { + return @" +Special Soup: Clam Chowder +Special Salad: Cobb Salad +Special Drink: Chai Tea +"; + } + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + 
[Description("The name of the menu item.")] + string menuItem) + { + return "$9.99"; + } + } +} diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs new file mode 100644 index 000000000000..3937635203a4 --- /dev/null +++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Templating.cs @@ -0,0 +1,142 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; +using Microsoft.SemanticKernel.PromptTemplates.Liquid; + +namespace Agents; + +/// +/// Demonstrate parameterized template instruction for . +/// +public class OpenAIAssistant_Templating(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private readonly static (string Input, string? Style)[] s_inputs = + [ + (Input: "Home cooking is great.", Style: null), + (Input: "Talk about world peace.", Style: "iambic pentameter"), + (Input: "Say something about doing your best.", Style: "e. e. cummings"), + (Input: "What do you think about having fun?", Style: "old school rap") + ]; + + [Fact] + public async Task InvokeAgentWithInstructionsAsync() + { + // Instruction based template always proceseed by KernelPromptTemplateFactory + OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = + """ + Write a one verse poem on the requested topic in the styles of {{$style}}. + Always state the requested style of the poem. 
+ """, + Metadata = AssistantSampleMetadata + }, + kernel: new Kernel(), + defaultArguments: new KernelArguments() + { + {"style", "haiku"} + }); + + await InvokeAssistantAgentWithTemplateAsync(agent); + } + + [Fact] + public async Task InvokeAgentWithKernelTemplateAsync() + { + // Default factory is KernelPromptTemplateFactory + await InvokeAssistantAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the styles of {{$style}}. + Always state the requested style of the poem. + """); + } + + [Fact] + public async Task InvokeAgentWithHandlebarsTemplateAsync() + { + await InvokeAssistantAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the styles of {{style}}. + Always state the requested style of the poem. + """, + HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, + new HandlebarsPromptTemplateFactory()); + } + + [Fact] + public async Task InvokeAgentWithLiquidTemplateAsync() + { + await InvokeAssistantAgentWithTemplateAsync( + """ + Write a one verse poem on the requested topic in the styles of {{style}}. + Always state the requested style of the poem. + """, + LiquidPromptTemplateFactory.LiquidTemplateFormat, + new LiquidPromptTemplateFactory()); + } + + private async Task InvokeAssistantAgentWithTemplateAsync( + string instructionTemplate, + string? templateFormat = null, + IPromptTemplateFactory? 
templateFactory = null) + { + // Define the agent + OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateFromTemplateAsync( + clientProvider: this.GetClientProvider(), + capabilities: new OpenAIAssistantCapabilities(this.Model) + { + Metadata = AssistantSampleMetadata + }, + kernel: new Kernel(), + defaultArguments: new KernelArguments() + { + {"style", "haiku"} + }, + templateConfig: new PromptTemplateConfig + { + Template = instructionTemplate, + TemplateFormat = templateFormat, + }, + templateFactory); + + await InvokeAssistantAgentWithTemplateAsync(agent); + } + + private async Task InvokeAssistantAgentWithTemplateAsync(OpenAIAssistantAgent agent) + { + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + try + { + // Respond to user input + foreach ((string input, string? style) in s_inputs) + { + ChatMessageContent request = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, request); + this.WriteAgentChatMessage(request); + + KernelArguments? arguments = null; + + if (!string.IsNullOrWhiteSpace(style)) + { + arguments = new() { { "style", style } }; + } + + await foreach (ChatMessageContent message in agent.InvokeAsync(threadId, arguments)) + { + this.WriteAgentChatMessage(message); + } + } + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs new file mode 100644 index 000000000000..38f2add47fa6 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs @@ -0,0 +1,97 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; + +namespace ChatCompletion; + +// The following example shows how to use Semantic Kernel with Azure AI Inference / Azure AI Studio +public class AzureAIInference_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task ServicePromptAsync() + { + Console.WriteLine("======== Azure AI Inference - Chat Completion ========"); + + var chatService = new AzureAIInferenceChatCompletionService( + endpoint: new Uri(TestConfiguration.AzureAIInference.Endpoint), + apiKey: TestConfiguration.AzureAIInference.ApiKey); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + /* Output: + + Chat content: + ------------------------ + System: You are a librarian, expert about books + ------------------------ + User: Hi, I'm looking for book suggestions + ------------------------ + Assistant: Sure, I'd be happy to help! What kind of books are you interested in? Fiction or non-fiction? Any particular genre? + ------------------------ + User: I love history and philosophy, I'd like to learn something new about Greece, any suggestion? 
+ ------------------------ + Assistant: Great! For history and philosophy books about Greece, here are a few suggestions: + + 1. "The Greeks" by H.D.F. Kitto - This is a classic book that provides an overview of ancient Greek history and culture, including their philosophy, literature, and art. + + 2. "The Republic" by Plato - This is one of the most famous works of philosophy in the Western world, and it explores the nature of justice and the ideal society. + + 3. "The Peloponnesian War" by Thucydides - This is a detailed account of the war between Athens and Sparta in the 5th century BCE, and it provides insight into the political and military strategies of the time. + + 4. "The Iliad" by Homer - This epic poem tells the story of the Trojan War and is considered one of the greatest works of literature in the Western canon. + + 5. "The Histories" by Herodotus - This is a comprehensive account of the Persian Wars and provides a wealth of information about ancient Greek culture and society. + + I hope these suggestions are helpful! 
+ ------------------------ + */ + } + + [Fact] + public async Task ChatPromptAsync() + { + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddAzureAIInferenceChatCompletion( + endpoint: new Uri(TestConfiguration.AzureAIInference.Endpoint), + apiKey: TestConfiguration.AzureAIInference.ApiKey) + .Build(); + + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + Console.WriteLine(reply); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..62c1fd3dcb11 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs @@ -0,0 +1,176 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; + +namespace ChatCompletion; + +/// +/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service. +/// +public class AzureAIInference_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example demonstrates chat completion streaming using OpenAI. 
+ /// + [Fact] + public Task StreamChatAsync() + { + Console.WriteLine("======== Azure AI Inference - Chat Completion Streaming ========"); + + var chatService = new AzureAIInferenceChatCompletionService( + endpoint: new Uri(TestConfiguration.AzureAIInference.Endpoint), + apiKey: TestConfiguration.AzureAIInference.ApiKey); + + return this.StartStreamingChatAsync(chatService); + } + + /// + /// This example demonstrates chat completion streaming using OpenAI via the kernel. + /// + [Fact] + public async Task StreamChatPromptAsync() + { + Console.WriteLine("======== Azure AI Inference - Chat Prompt Completion Streaming ========"); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddAzureAIInferenceChatCompletion( + endpoint: new Uri(TestConfiguration.AzureAIInference.Endpoint), + apiKey: TestConfiguration.AzureAIInference.ApiKey) + .Build(); + + var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + Console.WriteLine(reply); + } + + /// + /// This example demonstrates how the chat completion service streams text content. + /// It shows how to access the response update via StreamingChatMessageContent.Content property + /// and alternatively via the StreamingChatMessageContent.Items property. 
+ /// + [Fact] + public async Task StreamTextFromChatAsync() + { + Console.WriteLine("======== Stream Text from Chat Content ========"); + + // Create chat completion service + var chatService = new AzureAIInferenceChatCompletionService( + endpoint: new Uri(TestConfiguration.AzureAIInference.Endpoint), + apiKey: TestConfiguration.AzureAIInference.ApiKey); + + // Create chat history with initial system and user messages + ChatHistory chatHistory = new("You are a librarian, an expert on books."); + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions."); + chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?"); + + // Start streaming chat based on the chat history + await foreach (StreamingChatMessageContent chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + // Access the response update via StreamingChatMessageContent.Content property + Console.Write(chatUpdate.Content); + + // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property + Console.Write(chatUpdate.Items.OfType().FirstOrDefault()); + } + } + + /// + /// Starts streaming chat with the chat completion service. + /// + /// The chat completion service instance. 
+ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + /// + /// Streams the message output from the chat completion service. + /// + /// The chat completion service instance. + /// The chat history instance. + /// The author role. + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + + /// + /// Outputs the chat history by streaming the message output from the kernel. + /// + /// The kernel instance. + /// The prompt message. + /// The full message output from the kernel. 
+ private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(prompt)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + return fullMessage; + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs index dcfdf7b511f0..5e3c455143b7 100644 --- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs @@ -1,9 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.OpenAI; +using Azure.AI.OpenAI.Chat; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using xRetry; namespace ChatCompletion; @@ -47,8 +47,8 @@ public async Task ExampleWithChatCompletionAsync() chatHistory.AddUserMessage(ask); // Chat Completion example - var chatExtensionsOptions = GetAzureChatExtensionsOptions(); - var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions }; + var dataSource = GetAzureSearchDataSource(); + var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource }; var chatCompletion = kernel.GetRequiredService(); @@ -98,8 +98,8 @@ public async Task ExampleWithKernelAsync() var function = kernel.CreateFunctionFromPrompt("Question: {{$input}}"); - var chatExtensionsOptions = GetAzureChatExtensionsOptions(); - var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions }; + var dataSource = GetAzureSearchDataSource(); + var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource }; // First question without previous context based on uploaded content. var response = await kernel.InvokeAsync(function, new(promptExecutionSettings) { ["input"] = ask }); @@ -125,20 +125,17 @@ public async Task ExampleWithKernelAsync() } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - private static AzureChatExtensionsOptions GetAzureChatExtensionsOptions() +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ private static AzureSearchChatDataSource GetAzureSearchDataSource() { - var azureSearchExtensionConfiguration = new AzureSearchChatExtensionConfiguration + return new AzureSearchChatDataSource { - SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), - Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey), + Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint), + Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey), IndexName = TestConfiguration.AzureAISearch.IndexName }; - - return new AzureChatExtensionsOptions - { - Extensions = { azureSearchExtensionConfiguration } - }; } +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. } diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs new file mode 100644 index 000000000000..c27625437779 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs @@ -0,0 +1,99 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+
+using System.Text;
+using Azure.Identity;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+
+namespace ChatCompletion;
+
+// The following example shows how to use Semantic Kernel with Azure OpenAI API
+public class AzureOpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
+{
+    [Fact]
+    public async Task ChatPromptAsync()
+    {
+        Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
+
+        StringBuilder chatPrompt = new("""
+            <message role="system">You are a librarian, expert about books</message>
+            <message role="user">Hi, I'm looking for book suggestions</message>
+            """);
+
+        var kernel = Kernel.CreateBuilder()
+            .AddAzureOpenAIChatCompletion(
+                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
+            .Build();
+
+        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());
+
+        chatPrompt.AppendLine($"<message role=\"assistant\">{reply}</message>");
+        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");
+
+        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());
+
+        Console.WriteLine(reply);
+    }
+
+    [Fact]
+    public async Task ServicePromptAsync()
+    {
+        Console.WriteLine("======== Azure Open AI - Chat Completion ========");
+
+        AzureOpenAIChatCompletionService chatCompletionService = new(
+            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+            modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+
+        await StartChatAsync(chatCompletionService);
+    }
+
+    ///
+    /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential.
+    /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate.
+ /// + [Fact] + public async Task DefaultAzureCredentialSampleAsync() + { + Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========"); + + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + credentials: new DefaultAzureCredential(), + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + + await StartChatAsync(chatCompletionService); + } + + private async Task StartChatAsync(IChatCompletionService chatGPT) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..1ef3647623aa --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs @@ -0,0 +1,155 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace ChatCompletion; + +/// +/// These examples demonstrate the ways different content types are streamed by Azure OpenAI via the chat completion service. +/// +public class AzureOpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example demonstrates chat completion streaming using Azure OpenAI. + /// + [Fact] + public Task StreamServicePromptAsync() + { + Console.WriteLine("======== Azure Open AI Chat Completion Streaming ========"); + + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + + return this.StartStreamingChatAsync(chatCompletionService); + } + + /// + /// This example demonstrates how the chat completion service streams text content. + /// It shows how to access the response update via StreamingChatMessageContent.Content property + /// and alternatively via the StreamingChatMessageContent.Items property. 
+ /// + [Fact] + public async Task StreamServicePromptTextAsync() + { + Console.WriteLine("======== Azure Open AI Streaming Text ========"); + + // Create chat completion service + AzureOpenAIChatCompletionService chatCompletionService = new( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.Endpoint, + apiKey: TestConfiguration.AzureOpenAI.ApiKey, + modelId: TestConfiguration.AzureOpenAI.ChatModelId); + + // Create chat history with initial system and user messages + ChatHistory chatHistory = new("You are a librarian, an expert on books."); + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions."); + chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?"); + + // Start streaming chat based on the chat history + await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + // Access the response update via StreamingChatMessageContent.Content property + Console.Write(chatUpdate.Content); + + // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property + Console.Write(chatUpdate.Items.OfType().FirstOrDefault()); + } + } + + /// + /// This example demonstrates how the chat completion service streams raw function call content. + /// See for a sample demonstrating how to simplify + /// function call content building out of streamed function call updates using the . 
+    ///
+    [Fact]
+    public async Task StreamFunctionCallContentAsync()
+    {
+        Console.WriteLine("======== Stream Function Call Content ========");
+
+        // Create chat completion service
+        AzureOpenAIChatCompletionService chatCompletionService = new(deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+            modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+
+        // Create kernel with helper plugin.
+        Kernel kernel = new();
+        kernel.ImportPluginFromFunctions("HelperFunctions",
+        [
+            kernel.CreateFunctionFromMethod((string longTestString) => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
+        ]);
+
+        // Create execution settings with manual function calling
+        OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) };
+
+        // Create chat history with initial user question
+        ChatHistory chatHistory = [];
+        chatHistory.AddUserMessage("Hi, what is the current time?");
+
+        // Start streaming chat based on the chat history
+        await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+        {
+            // Getting list of function call updates requested by LLM
+            var streamingFunctionCallUpdates = chatUpdate.Items.OfType<StreamingFunctionCallUpdateContent>();
+
+            // Iterating over function call updates. Please use the FunctionCallContentBuilder to simplify function call content building.
+ foreach (StreamingFunctionCallUpdateContent update in streamingFunctionCallUpdates) + { + Console.WriteLine($"Function call update: callId={update.CallId}, name={update.Name}, arguments={update.Arguments?.Replace("\n", "\\n")}, functionCallIndex={update.FunctionCallIndex}"); + } + } + } + + private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs 
b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs new file mode 100644 index 000000000000..eafae661111b --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs @@ -0,0 +1,53 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.ClientModel.Primitives; +using Azure.AI.OpenAI; +using Microsoft.SemanticKernel; + +namespace ChatCompletion; + +public sealed class AzureOpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task RunAsync() + { + Console.WriteLine("======== Using a custom AzureOpenAI client ========"); + + Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint); + Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName); + Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey); + + // Create an HttpClient and include your custom header(s) + var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + + // Configure AzureOpenAIClient to use the customized HttpClient + var clientOptions = new AzureOpenAIClientOptions + { + Transport = new HttpClientPipelineTransport(httpClient), + NetworkTimeout = TimeSpan.FromSeconds(30), + RetryPolicy = new ClientRetryPolicy() + }; + + var customClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey), clientOptions); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.ChatDeploymentName, customClient) + .Build(); + + // Load semantic plugin defined with prompt templates + string folder = RepoFiles.SamplePluginsPath(); + + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "FunPlugin")); + + // Run + var result = await kernel.InvokeAsync( + kernel.Plugins["FunPlugin"]["Excuses"], + new() { ["input"] = "I have no homework" } + ); + Console.WriteLine(result.GetValue()); + + httpClient.Dispose(); + 
} +} diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs index 05346974da2f..2d08c507aa4c 100644 --- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs +++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs @@ -2,6 +2,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; namespace ChatCompletion; diff --git a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs index 2e8f750e5476..f5963698ce0d 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs @@ -96,7 +96,7 @@ private async Task SimpleChatAsync(Kernel kernel) chatHistory.AddUserMessage("Hi, I'm looking for new power tools, any suggestion?"); await MessageOutputAsync(chatHistory); - // First bot assistant message + // First assistant message var reply = await chat.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); await MessageOutputAsync(chatHistory); @@ -105,7 +105,7 @@ private async Task SimpleChatAsync(Kernel kernel) chatHistory.AddUserMessage("I'm looking for a drill, a screwdriver and a hammer."); await MessageOutputAsync(chatHistory); - // Second bot assistant message + // Second assistant message reply = await chat.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); await MessageOutputAsync(chatHistory); diff --git a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs index 803a6b6fafcd..2b6f7b1f7556 100644 --- a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs +++ 
b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs @@ -97,7 +97,7 @@ private async Task StreamingChatAsync(Kernel kernel) chatHistory.AddUserMessage("Hi, I'm looking for alternative coffee brew methods, can you help me?"); await MessageOutputAsync(chatHistory); - // First bot assistant message + // First assistant message var streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory); var reply = await MessageOutputAsync(streamingChat); chatHistory.Add(reply); @@ -106,7 +106,7 @@ private async Task StreamingChatAsync(Kernel kernel) chatHistory.AddUserMessage("Give me the best speciality coffee roasters."); await MessageOutputAsync(chatHistory); - // Second bot assistant message + // Second assistant message streamingChat = chat.GetStreamingChatMessageContentsAsync(chatHistory); reply = await MessageOutputAsync(streamingChat); chatHistory.Add(reply); diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs new file mode 100644 index 000000000000..7768ff24ba36 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs @@ -0,0 +1,183 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models.Chat; + +namespace ChatCompletion; + +// The following example shows how to use Semantic Kernel with Ollama Chat Completion API +public class Ollama_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Demonstrates how you can use the chat completion service directly. 
+ /// + [Fact] + public async Task ServicePromptAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("======== Ollama - Chat Completion ========"); + + var chatService = new OllamaChatCompletionService( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + this.OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + this.OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + this.OutputLastMessage(chatHistory); + } + + /// + /// Demonstrates how you can get extra information from the service response, using the underlying inner content. + /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. 
+ /// + [Fact] + public async Task ServicePromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("======== Ollama - Chat Completion ========"); + + var chatService = new OllamaChatCompletionService( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + + // Assistant message details + // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content is always a list of chunks. + var replyInnerContent = reply.InnerContent as List; + + OutputInnerContent(replyInnerContent!); + } + + /// + /// Demonstrates how you can template a chat history call using the kernel for invocation. + /// + [Fact] + public async Task ChatPromptAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint ?? 
"http://localhost:11434"), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + Console.WriteLine(reply); + } + + /// + /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. + /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. + /// + [Fact] + public async Task ChatPromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint ?? "http://localhost:11434"), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var functionResult = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + // Ollama Sharp does not support non-streaming and always perform streaming calls, for this reason, the inner content of a non-streaming result is a list of the generated chunks. + var messageContent = functionResult.GetValue(); // Retrieves underlying chat message content from FunctionResult. + var replyInnerContent = messageContent!.InnerContent as List; // Retrieves inner content from ChatMessageContent. + + OutputInnerContent(replyInnerContent!); + } + + /// + /// Retrieve extra information from each streaming chunk response in a list of chunks. 
+ /// + /// List of streaming chunks provided as inner content of a chat message + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. + /// + private void OutputInnerContent(List innerContent) + { + Console.WriteLine($"Model: {innerContent![0].Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only + Console.WriteLine(" -- Chunk changing data -- "); + + innerContent.ForEach(streamChunk => + { + Console.WriteLine($"Message role: {streamChunk.Message.Role}"); + Console.WriteLine($"Message content: {streamChunk.Message.Content}"); + Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); + Console.WriteLine($"Done: {streamChunk.Done}"); + /// The last message in the chunk is a type with additional metadata. + if (streamChunk is ChatDoneResponseStream doneStreamChunk) + { + Console.WriteLine($"Done Reason: {doneStreamChunk.DoneReason}"); + Console.WriteLine($"Eval count: {doneStreamChunk.EvalCount}"); + Console.WriteLine($"Eval duration: {doneStreamChunk.EvalDuration}"); + Console.WriteLine($"Load duration: {doneStreamChunk.LoadDuration}"); + Console.WriteLine($"Total duration: {doneStreamChunk.TotalDuration}"); + Console.WriteLine($"Prompt eval count: {doneStreamChunk.PromptEvalCount}"); + Console.WriteLine($"Prompt eval duration: {doneStreamChunk.PromptEvalDuration}"); + } + Console.WriteLine("------------------------"); + }); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..45424cd3f87e --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs @@ -0,0 +1,265 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models.Chat; + +namespace ChatCompletion; + +/// +/// These examples demonstrate the ways different content types are streamed by Ollama via the chat completion service. +/// +public class Ollama_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example demonstrates chat completion streaming using Ollama. + /// + [Fact] + public Task StreamChatAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); + + var chatService = new OllamaChatCompletionService( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId); + + return this.StartStreamingChatAsync(chatService); + } + + /// + /// This example demonstrates retrieving extra information chat completion streaming using Ollama. + /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. 
+ /// + [Fact] + public async Task StreamChatWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("======== Ollama - Chat Completion Streaming ========"); + + var chatService = new OllamaChatCompletionService( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + this.OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + await foreach (var chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + var innerContent = chatUpdate.InnerContent as ChatResponseStream; + OutputInnerContent(innerContent!); + } + } + + /// + /// Demonstrates how you can template a chat history call while using the kernel for invocation. + /// + [Fact] + public async Task StreamChatPromptAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + Console.WriteLine(reply); + } + + /// + /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. 
+ /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. + /// + [Fact] + public async Task StreamChatPromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOllamaChatCompletion( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(chatPrompt.ToString())) + { + var innerContent = chatUpdate.InnerContent as ChatResponseStream; + OutputInnerContent(innerContent!); + } + } + + /// + /// This example demonstrates how the chat completion service streams text content. + /// It shows how to access the response update via StreamingChatMessageContent.Content property + /// and alternatively via the StreamingChatMessageContent.Items property. 
+ /// + [Fact] + public async Task StreamTextFromChatAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("======== Stream Text from Chat Content ========"); + + // Create chat completion service + var chatService = new OllamaChatCompletionService( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId); + + // Create chat history with initial system and user messages + ChatHistory chatHistory = new("You are a librarian, an expert on books."); + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions."); + chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?"); + + // Start streaming chat based on the chat history + await foreach (StreamingChatMessageContent chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + // Access the response update via StreamingChatMessageContent.Content property + Console.Write(chatUpdate.Content); + + // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property + Console.Write(chatUpdate.Items.OfType().FirstOrDefault()); + } + } + + private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + this.OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + this.OutputLastMessage(chatHistory); + + // Second assistant message + await 
StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + + private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(prompt)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + return fullMessage; + } + + /// + /// Retrieve extra information from each streaming chunk response. + /// + /// Streaming chunk provided as inner content of a streaming chat message + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OllamaSharp library that introduces breaking changes + /// may cause breaking changes in the code below. 
+ /// + private void OutputInnerContent(ChatResponseStream streamChunk) + { + Console.WriteLine($"Model: {streamChunk.Model}"); + Console.WriteLine($"Message role: {streamChunk.Message.Role}"); + Console.WriteLine($"Message content: {streamChunk.Message.Content}"); + Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); + Console.WriteLine($"Done: {streamChunk.Done}"); + + /// The last message in the chunk is a type with additional metadata. + if (streamChunk is ChatDoneResponseStream doneStream) + { + Console.WriteLine($"Done Reason: {doneStream.DoneReason}"); + Console.WriteLine($"Eval count: {doneStream.EvalCount}"); + Console.WriteLine($"Eval duration: {doneStream.EvalDuration}"); + Console.WriteLine($"Load duration: {doneStream.LoadDuration}"); + Console.WriteLine($"Total duration: {doneStream.TotalDuration}"); + Console.WriteLine($"Prompt eval count: {doneStream.PromptEvalCount}"); + Console.WriteLine($"Prompt eval duration: {doneStream.PromptEvalDuration}"); + } + Console.WriteLine("------------------------"); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs new file mode 100644 index 000000000000..563ed3475b5e --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs @@ -0,0 +1,109 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Onnx; + +namespace ChatCompletion; + +// The following example shows how to use Semantic Kernel with Onnx Gen AI Chat Completion API +public class Onnx_ChatCompletion(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Example using the service directly to get chat message content + /// + /// + /// Configuration example: + /// + /// + /// ModelId: + /// phi-3 + /// + /// + /// ModelPath: + /// D:\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32 + /// + /// + /// + [Fact] + public async Task ServicePromptAsync() + { + Assert.NotNull(TestConfiguration.Onnx.ModelId); // dotnet user-secrets set "Onnx:ModelId" "" + Assert.NotNull(TestConfiguration.Onnx.ModelPath); // dotnet user-secrets set "Onnx:ModelPath" "" + + Console.WriteLine("======== Onnx - Chat Completion ========"); + + var chatService = new OnnxRuntimeGenAIChatCompletionService( + modelId: TestConfiguration.Onnx.ModelId, + modelPath: TestConfiguration.Onnx.ModelPath); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + OutputLastMessage(chatHistory); + + // Second assistant message + reply = await chatService.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + OutputLastMessage(chatHistory); + } + + /// + /// Example using the kernel to send a chat history 
and get a chat message content + /// + /// + /// Configuration example: + /// + /// + /// ModelId: + /// phi-3 + /// + /// + /// ModelPath: + /// D:\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32 + /// + /// + /// + [Fact] + public async Task ChatPromptAsync() + { + Assert.NotNull(TestConfiguration.Onnx.ModelId); // dotnet user-secrets set "Onnx:ModelId" "" + Assert.NotNull(TestConfiguration.Onnx.ModelPath); // dotnet user-secrets set "Onnx:ModelPath" "" + + Console.WriteLine("======== Onnx - Chat Prompt Completion ========"); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOnnxRuntimeGenAIChatCompletion( + modelId: TestConfiguration.Onnx.ModelId, + modelPath: TestConfiguration.Onnx.ModelPath) + .Build(); + + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + Console.WriteLine(reply); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs new file mode 100644 index 000000000000..d6ad1f05e7f2 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs @@ -0,0 +1,208 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Onnx; + +namespace ChatCompletion; + +/// +/// These examples demonstrate the ways different content types are streamed by Onnx GenAI via the chat completion service. 
+/// +public class Onnx_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Streaming chat completion streaming using the service directly. + /// + /// + /// Configuration example: + /// + /// + /// ModelId: + /// phi-3 + /// + /// + /// ModelPath: + /// D:\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32 + /// + /// + /// + [Fact] + public Task StreamChatAsync() + { + Assert.NotNull(TestConfiguration.Onnx.ModelId); // dotnet user-secrets set "Onnx:ModelId" "" + Assert.NotNull(TestConfiguration.Onnx.ModelPath); // dotnet user-secrets set "Onnx:ModelPath" "" + + Console.WriteLine("======== Onnx - Chat Completion Streaming ========"); + + var chatService = new OnnxRuntimeGenAIChatCompletionService( + modelId: TestConfiguration.Onnx.ModelId, + modelPath: TestConfiguration.Onnx.ModelPath); + + return this.StartStreamingChatAsync(chatService); + } + + /// + /// Streaming chat completion using the kernel. + /// + /// + /// Configuration example: + /// + /// + /// ModelId: + /// phi-3 + /// + /// + /// ModelPath: + /// D:\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32 + /// + /// + /// + [Fact] + public async Task StreamChatPromptAsync() + { + Assert.NotNull(TestConfiguration.Onnx.ModelId); // dotnet user-secrets set "Onnx:ModelId" "" + Assert.NotNull(TestConfiguration.Onnx.ModelPath); // dotnet user-secrets set "Onnx:ModelPath" "" + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + Console.WriteLine("======== Onnx - Chat Completion Streaming ========"); + + var kernel = Kernel.CreateBuilder() + .AddOnnxRuntimeGenAIChatCompletion( + modelId: TestConfiguration.Onnx.ModelId, + modelPath: TestConfiguration.Onnx.ModelPath) + .Build(); + + var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, 
I'd like to learn something new about Greece, any suggestion"); + + reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + + Console.WriteLine(reply); + } + + /// + /// This example demonstrates how the chat completion service streams text content. + /// It shows how to access the response update via StreamingChatMessageContent.Content property + /// and alternatively via the StreamingChatMessageContent.Items property. + /// + /// + /// Configuration example: + /// + /// + /// ModelId: + /// phi-3 + /// + /// + /// ModelPath: + /// D:\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32 + /// + /// + /// + [Fact] + public async Task StreamTextFromChatAsync() + { + Assert.NotNull(TestConfiguration.Onnx.ModelId); // dotnet user-secrets set "Onnx:ModelId" "" + Assert.NotNull(TestConfiguration.Onnx.ModelPath); // dotnet user-secrets set "Onnx:ModelPath" "" + + Console.WriteLine("======== Stream Text from Chat Content ========"); + + // Create chat completion service + var chatService = new OnnxRuntimeGenAIChatCompletionService( + modelId: TestConfiguration.Onnx.ModelId, + modelPath: TestConfiguration.Onnx.ModelPath); + + // Create chat history with initial system and user messages + ChatHistory chatHistory = new("You are a librarian, an expert on books."); + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions."); + chatHistory.AddUserMessage("I love history and philosophy. 
I'd like to learn something new about Greece, any suggestion?"); + + // Start streaming chat based on the chat history + await foreach (StreamingChatMessageContent chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + // Access the response update via StreamingChatMessageContent.Content property + Console.Write(chatUpdate.Content); + + // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property + Console.Write(chatUpdate.Items.OfType().FirstOrDefault()); + } + } + + private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + { + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); + + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + } + + private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + 
Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); + } + + private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) + { + bool roleWritten = false; + string fullMessage = string.Empty; + + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(prompt)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } + + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } + } + + Console.WriteLine("\n------------------------"); + return fullMessage; + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs index 46aadfc243b0..fe2f8e8c2e40 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs @@ -1,82 +1,110 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-using Azure.Identity; +using System.Text; +using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; namespace ChatCompletion; -// The following example shows how to use Semantic Kernel with OpenAI ChatGPT API +// The following example shows how to use Semantic Kernel with OpenAI API public class OpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output) { [Fact] - public async Task OpenAIChatSampleAsync() + public async Task ServicePromptAsync() { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + Console.WriteLine("======== Open AI - Chat Completion ========"); OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); await StartChatAsync(chatCompletionService); + } + + [Fact] + public async Task ServicePromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - /* Output: + Console.WriteLine("======== Open AI - Chat Completion ========"); - Chat content: - ------------------------ - System: You are a librarian, expert about books - ------------------------ - User: Hi, I'm looking for book suggestions - ------------------------ - Assistant: Sure, I'd be happy to help! What kind of books are you interested in? Fiction or non-fiction? Any particular genre? - ------------------------ - User: I love history and philosophy, I'd like to learn something new about Greece, any suggestion? - ------------------------ - Assistant: Great! For history and philosophy books about Greece, here are a few suggestions: + OpenAIChatCompletionService chatService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - 1. "The Greeks" by H.D.F. 
Kitto - This is a classic book that provides an overview of ancient Greek history and culture, including their philosophy, literature, and art. + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); - 2. "The Republic" by Plato - This is one of the most famous works of philosophy in the Western world, and it explores the nature of justice and the ideal society. + var chatHistory = new ChatHistory("You are a librarian, expert about books"); - 3. "The Peloponnesian War" by Thucydides - This is a detailed account of the war between Athens and Sparta in the 5th century BCE, and it provides insight into the political and military strategies of the time. + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + this.OutputLastMessage(chatHistory); - 4. "The Iliad" by Homer - This epic poem tells the story of the Trojan War and is considered one of the greatest works of literature in the Western canon. + // First assistant message + var reply = await chatService.GetChatMessageContentAsync(chatHistory, new OpenAIPromptExecutionSettings { Logprobs = true, TopLogprobs = 3 }); - 5. "The Histories" by Herodotus - This is a comprehensive account of the Persian Wars and provides a wealth of information about ancient Greek culture and society. + // Assistant message details + var replyInnerContent = reply.InnerContent as OpenAI.Chat.ChatCompletion; - I hope these suggestions are helpful! 
- ------------------------ - */ + OutputInnerContent(replyInnerContent!); } [Fact] - public async Task AzureOpenAIChatSampleAsync() + public async Task ChatPromptAsync() { - Console.WriteLine("======== Azure Open AI - Chat Completion ========"); + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); - await StartChatAsync(chatCompletionService); + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + var reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + + reply = await kernel.InvokePromptAsync(chatPrompt.ToString()); + + Console.WriteLine(reply); } /// - /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential. - /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate. + /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes + /// may cause breaking changes in the code below. 
+ /// [Fact] - public async Task AzureOpenAIWithDefaultAzureCredentialSampleAsync() + public async Task ChatPromptWithInnerContentAsync() { - Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========"); + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - credentials: new DefaultAzureCredential(), - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); - await StartChatAsync(chatCompletionService); + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + var functionResult = await kernel.InvokePromptAsync(chatPrompt.ToString(), + new(new OpenAIPromptExecutionSettings { Logprobs = true, TopLogprobs = 3 })); + + var messageContent = functionResult.GetValue(); // Retrieves underlying chat message content from FunctionResult. + var replyInnerContent = messageContent!.InnerContent as OpenAI.Chat.ChatCompletion; // Retrieves inner content from ChatMessageContent. 
+ + OutputInnerContent(replyInnerContent!); } private async Task StartChatAsync(IChatCompletionService chatGPT) @@ -88,33 +116,83 @@ private async Task StartChatAsync(IChatCompletionService chatGPT) // First user message chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - await MessageOutputAsync(chatHistory); + OutputLastMessage(chatHistory); - // First bot assistant message + // First assistant message var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); - await MessageOutputAsync(chatHistory); + OutputLastMessage(chatHistory); // Second user message chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); - await MessageOutputAsync(chatHistory); + OutputLastMessage(chatHistory); - // Second bot assistant message + // Second assistant message reply = await chatGPT.GetChatMessageContentAsync(chatHistory); chatHistory.Add(reply); - await MessageOutputAsync(chatHistory); + OutputLastMessage(chatHistory); } /// - /// Outputs the last message of the chat history + /// Retrieve extra information from a inner content of type . /// - private Task MessageOutputAsync(ChatHistory chatHistory) + /// An instance of retrieved as an inner content of . + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes + /// may break the code below. 
+ /// + private void OutputInnerContent(OpenAI.Chat.ChatCompletion innerContent) { - var message = chatHistory.Last(); - - Console.WriteLine($"{message.Role}: {message.Content}"); - Console.WriteLine("------------------------"); - - return Task.CompletedTask; + Console.WriteLine($"Message role: {innerContent.Role}"); // Available as a property of ChatMessageContent + Console.WriteLine($"Message content: {innerContent.Content[0].Text}"); // Available as a property of ChatMessageContent + + Console.WriteLine($"Model: {innerContent.Model}"); // Model doesn't change per chunk, so we can get it from the first chunk only + Console.WriteLine($"Created At: {innerContent.CreatedAt}"); + + Console.WriteLine($"Finish reason: {innerContent.FinishReason}"); + Console.WriteLine($"Input tokens usage: {innerContent.Usage.InputTokenCount}"); + Console.WriteLine($"Output tokens usage: {innerContent.Usage.OutputTokenCount}"); + Console.WriteLine($"Total tokens usage: {innerContent.Usage.TotalTokenCount}"); + Console.WriteLine($"Refusal: {innerContent.Refusal} "); + Console.WriteLine($"Id: {innerContent.Id}"); + Console.WriteLine($"System fingerprint: {innerContent.SystemFingerprint}"); + + if (innerContent.ContentTokenLogProbabilities.Count > 0) + { + Console.WriteLine("Content token log probabilities:"); + foreach (var contentTokenLogProbability in innerContent.ContentTokenLogProbabilities) + { + Console.WriteLine($"Token: {contentTokenLogProbability.Token}"); + Console.WriteLine($"Log probability: {contentTokenLogProbability.LogProbability}"); + + Console.WriteLine(" Top log probabilities for this token:"); + foreach (var topLogProbability in contentTokenLogProbability.TopLogProbabilities) + { + Console.WriteLine($" Token: {topLogProbability.Token}"); + Console.WriteLine($" Log probability: {topLogProbability.LogProbability}"); + Console.WriteLine(" ======="); + } + + Console.WriteLine("--------------"); + } + } + + if (innerContent.RefusalTokenLogProbabilities.Count > 0) + { + 
Console.WriteLine("Refusal token log probabilities:"); + foreach (var refusalTokenLogProbability in innerContent.RefusalTokenLogProbabilities) + { + Console.WriteLine($"Token: {refusalTokenLogProbability.Token}"); + Console.WriteLine($"Log probability: {refusalTokenLogProbability.LogProbability}"); + + Console.WriteLine(" Refusal top log probabilities for this token:"); + foreach (var topLogProbability in refusalTokenLogProbability.TopLogProbabilities) + { + Console.WriteLine($" Token: {topLogProbability.Token}"); + Console.WriteLine($" Log probability: {topLogProbability.LogProbability}"); + Console.WriteLine(" ======="); + } + } + } } } diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs deleted file mode 100644 index 9534cac09a63..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs +++ /dev/null @@ -1,133 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -/// -/// The following example shows how to use Semantic Kernel with multiple chat completion results. -/// -public class OpenAI_ChatCompletionMultipleChoices(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// Example with multiple chat completion results using . - /// - [Fact] - public async Task MultipleChatCompletionResultsUsingKernelAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Execution settings with configured ResultsPerPrompt property. 
- var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - var contents = await kernel.InvokePromptAsync>("Write a paragraph about why AI is awesome", new(executionSettings)); - - foreach (var content in contents!) - { - Console.Write(content.ToString() ?? string.Empty); - Console.WriteLine("\n-------------\n"); - } - } - - /// - /// Example with multiple chat completion results using . - /// - [Fact] - public async Task MultipleChatCompletionResultsUsingChatCompletionServiceAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - // Execution settings with configured ResultsPerPrompt property. - var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage("Write a paragraph about why AI is awesome"); - - var chatCompletionService = kernel.GetRequiredService(); - - foreach (var chatMessageContent in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings)) - { - Console.Write(chatMessageContent.Content ?? string.Empty); - Console.WriteLine("\n-------------\n"); - } - } - - /// - /// This example shows how to handle multiple results in case if prompt template contains a call to another prompt function. - /// is used for result selection. - /// - [Fact] - public async Task MultipleChatCompletionResultsInPromptTemplateAsync() - { - var kernel = Kernel - .CreateBuilder() - .AddOpenAIChatCompletion( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey) - .Build(); - - var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 }; - - // Initializing a function with execution settings for multiple results. 
- // We ask AI to write one paragraph, but in execution settings we specified that we want 3 different results for this request. - var function = KernelFunctionFactory.CreateFromPrompt("Write a paragraph about why AI is awesome", executionSettings, "GetParagraph"); - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); - - kernel.Plugins.Add(plugin); - - // Add function result selection filter. - kernel.FunctionInvocationFilters.Add(new FunctionResultSelectionFilter(this.Output)); - - // Inside our main request, we call MyPlugin.GetParagraph function for text summarization. - // Taking into account that MyPlugin.GetParagraph function produces 3 results, for text summarization we need to choose only one of them. - // Registered filter will be invoked during execution, which will select and return only 1 result, and this result will be inserted in our main request for summarization. - var result = await kernel.InvokePromptAsync("Summarize this text: {{MyPlugin.GetParagraph}}"); - - // It's possible to check what prompt was rendered for our main request. - Console.WriteLine($"Rendered prompt: '{result.RenderedPrompt}'"); - - // Output: - // Rendered prompt: 'Summarize this text: AI is awesome because...' - } - - /// - /// Example of filter which is responsible for result selection in case if some function produces multiple results. - /// - private sealed class FunctionResultSelectionFilter(ITestOutputHelper output) : IFunctionInvocationFilter - { - public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next) - { - await next(context); - - // Selection logic for function which is expected to produce multiple results. 
- if (context.Function.Name == "GetParagraph") - { - // Get multiple results from function invocation - var contents = context.Result.GetValue>()!; - - output.WriteLine("Multiple results:"); - - foreach (var content in contents) - { - output.WriteLine(content.ToString()); - } - - // Select first result for correct prompt rendering - var selectedContent = contents[0]; - context.Result = new FunctionResult(context.Function, selectedContent, context.Kernel.Culture, selectedContent.Metadata); - } - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs index 4836dcf03d9f..0e9fe0326290 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Text; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; @@ -15,30 +16,34 @@ public class OpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest /// This example demonstrates chat completion streaming using OpenAI. /// [Fact] - public Task StreamOpenAIChatAsync() + public async Task StreamServicePromptAsync() { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + Console.WriteLine("======== Open AI Chat Completion Streaming ========"); OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); - return this.StartStreamingChatAsync(chatCompletionService); - } + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); - /// - /// This example demonstrates chat completion streaming using Azure OpenAI. 
- /// - [Fact] - public Task StreamAzureOpenAIChatAsync() - { - Console.WriteLine("======== Azure Open AI Chat Completion Streaming ========"); + var chatHistory = new ChatHistory("You are a librarian, expert about books"); + OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); + OutputLastMessage(chatHistory); - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + // First assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); - return this.StartStreamingChatAsync(chatCompletionService); + // Second user message + chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); + OutputLastMessage(chatHistory); + + // Second assistant message + await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); } /// @@ -47,16 +52,15 @@ public Task StreamAzureOpenAIChatAsync() /// and alternatively via the StreamingChatMessageContent.Items property. 
/// [Fact] - public async Task StreamTextContentAsync() + public async Task StreamServicePromptTextAsync() { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + Console.WriteLine("======== Stream Text Content ========"); // Create chat completion service - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); + OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); // Create chat history with initial system and user messages ChatHistory chatHistory = new("You are a librarian, an expert on books."); @@ -74,14 +78,109 @@ public async Task StreamTextContentAsync() } } + /// + /// This example demonstrates retrieving extra information chat completion streaming using OpenAI. + /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes + /// may break the code below. 
+ /// + [Fact] + public async Task StreamServicePromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("======== OpenAI - Chat Completion Streaming (InnerContent) ========"); + + var chatService = new OpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("Chat content:"); + Console.WriteLine("------------------------"); + + var chatHistory = new ChatHistory("Answer straight, do not explain your answer"); + this.OutputLastMessage(chatHistory); + + // First user message + chatHistory.AddUserMessage("How many natural satellites are around Earth?"); + this.OutputLastMessage(chatHistory); + + await foreach (var chatUpdate in chatService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + var innerContent = chatUpdate.InnerContent as OpenAI.Chat.StreamingChatCompletionUpdate; + OutputInnerContent(innerContent!); + } + } + + /// + /// Demonstrates how you can template a chat history call while using the kernel for invocation. 
+ /// + [Fact] + public async Task StreamChatPromptAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("======== OpenAI - Chat Prompt Completion Streaming ========"); + + StringBuilder chatPrompt = new(""" + You are a librarian, expert about books + Hi, I'm looking for book suggestions + """); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + var reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + chatPrompt.AppendLine($""); + chatPrompt.AppendLine("I love history and philosophy, I'd like to learn something new about Greece, any suggestion"); + reply = await StreamMessageOutputFromKernelAsync(kernel, chatPrompt.ToString()); + Console.WriteLine(reply); + } + + /// + /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation. + /// + /// + /// This is a breaking glass scenario, any attempt on running with different versions of the OpenAI SDK that introduces breaking changes + /// may cause breaking changes in the code below. + /// + [Fact] + public async Task StreamChatPromptWithInnerContentAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("======== OpenAI - Chat Prompt Completion Streaming (InnerContent) ========"); + + StringBuilder chatPrompt = new(""" + Answer straight, do not explain your answer + How many natural satellites are around Earth? 
+ """); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) + .Build(); + + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(chatPrompt.ToString())) + { + var innerContent = chatUpdate.InnerContent as OpenAI.Chat.StreamingChatCompletionUpdate; + OutputInnerContent(innerContent!); + } + } + /// /// This example demonstrates how the chat completion service streams raw function call content. - /// See for a sample demonstrating how to simplify + /// See for a sample demonstrating how to simplify /// function call content building out of streamed function call updates using the . /// [Fact] public async Task StreamFunctionCallContentAsync() { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + Console.WriteLine("======== Stream Function Call Content ========"); // Create chat completion service @@ -95,10 +194,10 @@ public async Task StreamFunctionCallContentAsync() ]); // Create execution settings with manual function calling - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; // Create chat history with initial user question - ChatHistory chatHistory = new(); + ChatHistory chatHistory = []; chatHistory.AddUserMessage("Hi, what is the current time?"); // Start streaming chat based on the chat history @@ -115,61 +214,151 @@ public async Task StreamFunctionCallContentAsync() } } - private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService) + private async Task StreamMessageOutputAsync(OpenAIChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) { - Console.WriteLine("Chat content:"); - Console.WriteLine("------------------------"); - - var chatHistory = new 
ChatHistory("You are a librarian, expert about books"); - OutputLastMessage(chatHistory); + bool roleWritten = false; + string fullMessage = string.Empty; - // First user message - chatHistory.AddUserMessage("Hi, I'm looking for book suggestions"); - OutputLastMessage(chatHistory); + await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + { + if (!roleWritten && chatUpdate.Role.HasValue) + { + Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); + roleWritten = true; + } - // First bot assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + if (chatUpdate.Content is { Length: > 0 }) + { + fullMessage += chatUpdate.Content; + Console.Write(chatUpdate.Content); + } - // Second user message - chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?"); - OutputLastMessage(chatHistory); + // The last message in the chunk has the usage metadata. 
+ // https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options + if (chatUpdate.Metadata?["Usage"] is not null) + { + Console.WriteLine(chatUpdate.Metadata["Usage"]?.AsJson()); + } + } - // Second bot assistant message - await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant); + Console.WriteLine("\n------------------------"); + chatHistory.AddMessage(authorRole, fullMessage); } - private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole) + private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, string prompt) { bool roleWritten = false; string fullMessage = string.Empty; - - await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory)) + await foreach (var chatUpdate in kernel.InvokePromptStreamingAsync(prompt)) { if (!roleWritten && chatUpdate.Role.HasValue) { Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}"); roleWritten = true; } - if (chatUpdate.Content is { Length: > 0 }) { fullMessage += chatUpdate.Content; Console.Write(chatUpdate.Content); } - } + // The last message in the chunk has the usage metadata. + // https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options + if (chatUpdate.Metadata?["Usage"] is not null) + { + Console.WriteLine(chatUpdate.Metadata["Usage"]?.AsJson()); + } + } Console.WriteLine("\n------------------------"); - chatHistory.AddMessage(authorRole, fullMessage); + return fullMessage; } /// - /// Outputs the last message of the chat history + /// Retrieve extra information from a inner content of type . /// - private void OutputLastMessage(ChatHistory chatHistory) + /// An instance of retrieved as an inner content of . + /// + /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes + /// may break the code below. 
+ /// + private void OutputInnerContent(OpenAI.Chat.StreamingChatCompletionUpdate streamChunk) { - var message = chatHistory.Last(); + Console.WriteLine($"Id: {streamChunk.CompletionId}"); + Console.WriteLine($"Model: {streamChunk.Model}"); + Console.WriteLine($"Created at: {streamChunk.CreatedAt}"); + Console.WriteLine($"Finish reason: {(streamChunk.FinishReason?.ToString() ?? "--")}"); + Console.WriteLine($"System fingerprint: {streamChunk.SystemFingerprint}"); + + Console.WriteLine($"Content updates: {streamChunk.ContentUpdate.Count}"); + foreach (var contentUpdate in streamChunk.ContentUpdate) + { + Console.WriteLine($" Kind: {contentUpdate.Kind}"); + if (contentUpdate.Kind == OpenAI.Chat.ChatMessageContentPartKind.Text) + { + Console.WriteLine($" Text: {contentUpdate.Text}"); // Available as a properties of StreamingChatMessageContent.Items + Console.WriteLine(" ======="); + } + else if (contentUpdate.Kind == OpenAI.Chat.ChatMessageContentPartKind.Image) + { + Console.WriteLine($" Image uri: {contentUpdate.ImageUri}"); + Console.WriteLine($" Image media type: {contentUpdate.ImageBytesMediaType}"); + Console.WriteLine($" Image detail: {contentUpdate.ImageDetailLevel}"); + Console.WriteLine($" Image bytes: {contentUpdate.ImageBytes}"); + Console.WriteLine(" ======="); + } + else if (contentUpdate.Kind == OpenAI.Chat.ChatMessageContentPartKind.Refusal) + { + Console.WriteLine($" Refusal: {contentUpdate.Refusal}"); + Console.WriteLine(" ======="); + } + } + + if (streamChunk.ContentTokenLogProbabilities.Count > 0) + { + Console.WriteLine("Content token log probabilities:"); + foreach (var contentTokenLogProbability in streamChunk.ContentTokenLogProbabilities) + { + Console.WriteLine($"Token: {contentTokenLogProbability.Token}"); + Console.WriteLine($"Log probability: {contentTokenLogProbability.LogProbability}"); + + Console.WriteLine(" Top log probabilities for this token:"); + foreach (var topLogProbability in contentTokenLogProbability.TopLogProbabilities) + { 
+ Console.WriteLine($" Token: {topLogProbability.Token}"); + Console.WriteLine($" Log probability: {topLogProbability.LogProbability}"); + Console.WriteLine(" ======="); + } + + Console.WriteLine("--------------"); + } + } - Console.WriteLine($"{message.Role}: {message.Content}"); + if (streamChunk.RefusalTokenLogProbabilities.Count > 0) + { + Console.WriteLine("Refusal token log probabilities:"); + foreach (var refusalTokenLogProbability in streamChunk.RefusalTokenLogProbabilities) + { + Console.WriteLine($"Token: {refusalTokenLogProbability.Token}"); + Console.WriteLine($"Log probability: {refusalTokenLogProbability.LogProbability}"); + + Console.WriteLine(" Refusal top log probabilities for this token:"); + foreach (var topLogProbability in refusalTokenLogProbability.TopLogProbabilities) + { + Console.WriteLine($" Token: {topLogProbability.Token}"); + Console.WriteLine($" Log probability: {topLogProbability.LogProbability}"); + Console.WriteLine(" ======="); + } + } + } + + // The last message in the chunk has the usage metadata. + // https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options + if (streamChunk.Usage is not null) + { + Console.WriteLine($"Usage input tokens: {streamChunk.Usage.InputTokenCount}"); + Console.WriteLine($"Usage output tokens: {streamChunk.Usage.OutputTokenCount}"); + Console.WriteLine($"Usage total tokens: {streamChunk.Usage.TotalTokenCount}"); + } Console.WriteLine("------------------------"); } } diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs deleted file mode 100644 index 6a23a43ae9f8..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs +++ /dev/null @@ -1,114 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace ChatCompletion; - -// The following example shows how to use Semantic Kernel with multiple streaming chat completion results. -public class OpenAI_ChatCompletionStreamingMultipleChoices(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public Task AzureOpenAIMultiStreamingChatCompletionAsync() - { - Console.WriteLine("======== Azure OpenAI - Multiple Chat Completions - Raw Streaming ========"); - - AzureOpenAIChatCompletionService chatCompletionService = new( - deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, - endpoint: TestConfiguration.AzureOpenAI.Endpoint, - apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ChatModelId); - - return StreamingChatCompletionAsync(chatCompletionService, 3); - } - - [Fact] - public Task OpenAIMultiStreamingChatCompletionAsync() - { - Console.WriteLine("======== OpenAI - Multiple Chat Completions - Raw Streaming ========"); - - OpenAIChatCompletionService chatCompletionService = new( - modelId: TestConfiguration.OpenAI.ChatModelId, - apiKey: TestConfiguration.OpenAI.ApiKey); - - return StreamingChatCompletionAsync(chatCompletionService, 3); - } - - /// - /// Streams the results of a chat completion request to the console. 
- /// - /// Chat completion service to use - /// Number of results to get for each chat completion request - private async Task StreamingChatCompletionAsync(IChatCompletionService chatCompletionService, - int numResultsPerPrompt) - { - var executionSettings = new OpenAIPromptExecutionSettings() - { - MaxTokens = 200, - FrequencyPenalty = 0, - PresencePenalty = 0, - Temperature = 1, - TopP = 0.5, - ResultsPerPrompt = numResultsPerPrompt - }; - - var consoleLinesPerResult = 10; - - // Uncomment this if you want to use a console app to display the results - // ClearDisplayByAddingEmptyLines(); - - var prompt = "Hi, I'm looking for 5 random title names for sci-fi books"; - - await ProcessStreamAsyncEnumerableAsync(chatCompletionService, prompt, executionSettings, consoleLinesPerResult); - - Console.WriteLine(); - - // Set cursor position to after displayed results - // Console.SetCursorPosition(0, executionSettings.ResultsPerPrompt * consoleLinesPerResult); - - Console.WriteLine(); - } - - /// - /// Does the actual streaming and display of the chat completion. - /// - private async Task ProcessStreamAsyncEnumerableAsync(IChatCompletionService chatCompletionService, string prompt, - OpenAIPromptExecutionSettings executionSettings, int consoleLinesPerResult) - { - var messagesPerChoice = new Dictionary(); - var chatHistory = new ChatHistory(prompt); - - // For each chat completion update - await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings)) - { - // Set cursor position to the beginning of where this choice (i.e. this result of - // a single multi-result request) is to be displayed. - // Console.SetCursorPosition(0, chatUpdate.ChoiceIndex * consoleLinesPerResult + 1); - - // The first time around, start choice text with role information - if (!messagesPerChoice.ContainsKey(chatUpdate.ChoiceIndex)) - { - messagesPerChoice[chatUpdate.ChoiceIndex] = $"Role: {chatUpdate.Role ?? 
new AuthorRole()}\n"; - Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Role: {chatUpdate.Role ?? new AuthorRole()}"); - } - - // Add latest completion bit, if any - if (chatUpdate.Content is { Length: > 0 }) - { - messagesPerChoice[chatUpdate.ChoiceIndex] += chatUpdate.Content; - } - - // Overwrite what is currently in the console area for the updated choice - // Console.Write(messagesPerChoice[chatUpdate.ChoiceIndex]); - Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Content: {chatUpdate.Content}"); - } - - // Display the aggregated results - foreach (string message in messagesPerChoice.Values) - { - Console.WriteLine("-------------------"); - Console.WriteLine(message); - } - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs deleted file mode 100644 index 9e63e4b46975..000000000000 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs +++ /dev/null @@ -1,56 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Azure; -using Azure.AI.OpenAI; -using Azure.Core.Pipeline; -using Microsoft.SemanticKernel; - -namespace ChatCompletion; - -public sealed class OpenAI_CustomAzureOpenAIClient(ITestOutputHelper output) : BaseTest(output) -{ - [Fact] - public async Task RunAsync() - { - Console.WriteLine("======== Using a custom OpenAI client ========"); - - string endpoint = TestConfiguration.AzureOpenAI.Endpoint; - string deploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName; - string apiKey = TestConfiguration.AzureOpenAI.ApiKey; - - if (endpoint is null || deploymentName is null || apiKey is null) - { - Console.WriteLine("Azure OpenAI credentials not found. 
Skipping example."); - return; - } - - // Create an HttpClient and include your custom header(s) - var httpClient = new HttpClient(); - httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); - - // Configure OpenAIClient to use the customized HttpClient - var clientOptions = new OpenAIClientOptions - { - Transport = new HttpClientTransport(httpClient), - }; - var openAIClient = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions); - - IKernelBuilder builder = Kernel.CreateBuilder(); - builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient); - Kernel kernel = builder.Build(); - - // Load semantic plugin defined with prompt templates - string folder = RepoFiles.SamplePluginsPath(); - - kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "FunPlugin")); - - // Run - var result = await kernel.InvokeAsync( - kernel.Plugins["FunPlugin"]["Excuses"], - new() { ["input"] = "I have no homework" } - ); - Console.WriteLine(result.GetValue()); - - httpClient.Dispose(); - } -} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs new file mode 100644 index 000000000000..c36b1d945c67 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs @@ -0,0 +1,52 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ClientModel; +using System.ClientModel.Primitives; +using Microsoft.SemanticKernel; +using OpenAI; + +namespace ChatCompletion; + +public sealed class OpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task RunAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + Console.WriteLine("======== Using a custom OpenAI client ========"); + + // Create an HttpClient and include your custom header(s) + using var httpClient = new HttpClient(); + httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value"); + + // Configure AzureOpenAIClient to use the customized HttpClient + var clientOptions = new OpenAIClientOptions + { + Transport = new HttpClientPipelineTransport(httpClient), + NetworkTimeout = TimeSpan.FromSeconds(30), + RetryPolicy = new ClientRetryPolicy() + }; + + var customClient = new OpenAIClient(new ApiKeyCredential(TestConfiguration.OpenAI.ApiKey), clientOptions); + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, customClient) + .Build(); + + // Load semantic plugin defined with prompt templates + string folder = RepoFiles.SamplePluginsPath(); + + kernel.ImportPluginFromPromptDirectory(Path.Combine(folder, "FunPlugin")); + + // Run + var result = await kernel.InvokeAsync( + kernel.Plugins["FunPlugin"]["Excuses"], + new() { ["input"] = "I have no homework" } + ); + Console.WriteLine(result.GetValue()); + + httpClient.Dispose(); + } +} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs index d8f9a4942547..c2271ea54500 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs @@ -19,7 +19,7 @@ public async Task AutoInvokeKernelFunctionsAsync() const string ChatPrompt = """ What is the 
weather like in Paris? """; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var chatSemanticFunction = kernel.CreateFunctionFromPrompt( ChatPrompt, executionSettings); var chatPromptResult = await kernel.InvokeAsync(chatSemanticFunction); @@ -39,7 +39,7 @@ public async Task AutoInvokeKernelFunctionsMultipleCallsAsync() { new ChatMessageContent(AuthorRole.User, "What is the weather like in Paris?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result1 = await service.GetChatMessageContentAsync(chatHistory, executionSettings, kernel); chatHistory.Add(result1); @@ -60,7 +60,7 @@ public async Task AutoInvokeKernelFunctionsWithComplexParameterAsync() const string ChatPrompt = """ Book a holiday for me from 6th June 2025 to 20th June 2025? """; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var chatSemanticFunction = kernel.CreateFunctionFromPrompt( ChatPrompt, executionSettings); var chatPromptResult = await kernel.InvokeAsync(chatSemanticFunction); @@ -79,7 +79,7 @@ public async Task AutoInvokeLightPluginAsync() const string ChatPrompt = """ Turn on the light? 
"""; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var chatSemanticFunction = kernel.CreateFunctionFromPrompt( ChatPrompt, executionSettings); var chatPromptResult = await kernel.InvokeAsync(chatSemanticFunction); diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCallingWithMemoryPlugin.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCallingWithMemoryPlugin.cs new file mode 100644 index 000000000000..4f80ce4be119 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCallingWithMemoryPlugin.cs @@ -0,0 +1,124 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using System.Text.Json; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Plugins.Memory; + +namespace ChatCompletion; + +/// +/// Samples show how to use with OpenAI chat completion. +/// +public class OpenAI_FunctionCallingWithMemoryPlugin(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This sample demonstrates how to use a function to retrieve useful information from the memory. + /// + /// + /// The old and classes are used to store and retrieve information. + /// These implementations will be replaced soon and this sample will be updated to demonstrate the new (much improved) pattern. 
+ /// + [Fact] + public async Task UseFunctionCallingToRetrieveMemoriesAsync() + { + Assert.NotNull(TestConfiguration.OpenAI.ChatModelId); + Assert.NotNull(TestConfiguration.OpenAI.EmbeddingModelId); + Assert.NotNull(TestConfiguration.OpenAI.ApiKey); + + // Create a kernel with OpenAI chat completion and text embedding generation + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId!, + apiKey: TestConfiguration.OpenAI.ApiKey!); + kernelBuilder.AddOpenAITextEmbeddingGeneration( + modelId: TestConfiguration.OpenAI.EmbeddingModelId!, + apiKey: TestConfiguration.OpenAI.ApiKey!); + kernelBuilder.Services.AddSingleton(this.Output); + kernelBuilder.Services.AddSingleton(); + Kernel kernel = kernelBuilder.Build(); + + // Create a text memory store and populate it with sample data + var embeddingGeneration = kernel.GetRequiredService(); + VolatileMemoryStore memoryStore = new(); + SemanticTextMemory textMemory = new(memoryStore, embeddingGeneration); + string collectionName = "SemanticKernel"; + await PopulateMemoryAsync(collectionName, textMemory); + + // Add the text memory plugin to the kernel + MemoryPlugin memoryPlugin = new(collectionName, textMemory); + kernel.Plugins.AddFromObject(memoryPlugin, "Memory"); + + // Invoke chat prompt with auto invocation of functions enabled + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + var chatPrompt = + """ + What is Semantic Kernel? + """; + var response = await kernel.InvokePromptAsync(chatPrompt, new(executionSettings)); + + Console.WriteLine(response); + } + + #region private + /// + /// Utility to populate a text memory store with sample data. 
+ /// + private static async Task PopulateMemoryAsync(string collection, SemanticTextMemory textMemory) + { + string[] entries = + [ + "Semantic Kernel is a lightweight, open-source development kit that lets you easily build AI agents and integrate the latest AI models into your C#, Python, or Java codebase. It serves as an efficient middleware that enables rapid delivery of enterprise-grade solutions.", + "Semantic Kernel is a new AI SDK, and a simple and yet powerful programming model that lets you add large language capabilities to your app in just a matter of minutes. It uses natural language prompting to create and execute semantic kernel AI tasks across multiple languages and platforms.", + "In this guide, you learned how to quickly get started with Semantic Kernel by building a simple AI agent that can interact with an AI service and run your code. To see more examples and learn how to build more complex AI agents, check out our in-depth samples.", + "The Semantic Kernel extension for Visual Studio Code makes it easy to design and test semantic functions.The extension provides an interface for designing semantic functions and allows you to test them with the push of a button with your existing models and data.", + "The kernel is the central component of Semantic Kernel.At its simplest, the kernel is a Dependency Injection container that manages all of the services and plugins necessary to run your AI application." + ]; + foreach (var entry in entries) + { + await textMemory.SaveInformationAsync( + collection: collection, + text: entry, + id: Guid.NewGuid().ToString()); + } + } + + /// + /// Plugin that provides a function to retrieve useful information from the memory. 
+ /// + private sealed class MemoryPlugin(string collection, ISemanticTextMemory memory) + { + [KernelFunction] + [Description("Retrieve useful information to help answer a question.")] + public async Task GetUsefulInformationAsync( + [Description("The question being asked")] string question) + { + List memories = await memory + .SearchAsync(collection, question) + .ToListAsync() + .ConfigureAwait(false); + + return JsonSerializer.Serialize(memories.Select(x => x.Metadata.Text)); + } + } + + /// + /// Implementation of that logs the function invocation. + /// + private sealed class FunctionInvocationFilter(ITestOutputHelper output) : IFunctionInvocationFilter + { + private readonly ITestOutputHelper _output = output; + + /// + public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next) + { + this._output.WriteLine($"Function Invocation - {context.Function.Name}"); + await next(context); + } + } + #endregion +} diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs index 74f3d4bd6a64..1c6e8e895e00 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs @@ -31,7 +31,7 @@ public async Task AskAssistantToExplainFunctionCallsAfterExecutionAsync() { new ChatMessageContent(AuthorRole.User, "What is the weather like in Paris?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result1 = await service.GetChatMessageContentAsync(chatHistory, executionSettings, kernel); chatHistory.Add(result1); Console.WriteLine(result1); @@ -57,7 +57,7 @@ public async Task UseDecoratedFunctionAsync() { new ChatMessageContent(AuthorRole.User, "What is 
the weather like in Paris?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await service.GetChatMessageContentAsync(chatHistory, executionSettings, kernel); chatHistory.Add(result); Console.WriteLine(result); @@ -78,7 +78,7 @@ public async Task UseDecoratedFunctionWithPromptAsync() string chatPrompt = """ What is the weather like in Paris? """; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await kernel.InvokePromptAsync(chatPrompt, new(executionSettings)); Console.WriteLine(result); } @@ -100,7 +100,7 @@ public async Task AskAssistantToExplainFunctionCallsBeforeExecutionAsync() { new ChatMessageContent(AuthorRole.User, "What is the weather like in Paris?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await service.GetChatMessageContentAsync(chatHistory, executionSettings, kernel); chatHistory.Add(result); Console.WriteLine(result); @@ -124,7 +124,7 @@ public async Task QueryAssistantToExplainFunctionCallsBeforeExecutionAsync() { new ChatMessageContent(AuthorRole.User, "What is the weather like in Paris?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await service.GetChatMessageContentAsync(chatHistory, 
executionSettings, kernel); chatHistory.Add(result); Console.WriteLine(result); @@ -193,7 +193,7 @@ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext co { new ChatMessageContent(AuthorRole.User, $"Provide an explanation why these functions: {string.Join(',', functionNames)} need to be called to answer this query: {message.Content}") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; var result = await service.GetChatMessageContentAsync(chatHistory, executionSettings, context.Kernel); this._output.WriteLine(result); diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs index 11ea5ab362f9..eff0aba7398e 100644 --- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_RepeatedFunctionCalling.cs @@ -28,7 +28,7 @@ public async Task ReuseFunctionResultExecutionAsync() { new ChatMessageContent(AuthorRole.User, "What is the weather like in Boston?") }; - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result1 = await service.GetChatMessageContentAsync(chatHistory, executionSettings, kernel); chatHistory.Add(result1); Console.WriteLine(result1); diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_StructuredOutputs.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_StructuredOutputs.cs new file mode 100644 index 000000000000..e4297e854d65 --- /dev/null +++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_StructuredOutputs.cs @@ -0,0 +1,278 @@ +๏ปฟ// Copyright 
(c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; + +namespace ChatCompletion; + +/// +/// Structured Outputs is a feature in OpenAI API that ensures the model will always generate responses based on provided JSON Schema. +/// This gives more control over model responses, allows to avoid model hallucinations and write simpler prompts without a need to be specific about response format. +/// More information here: . +/// +/// +/// OpenAI Structured Outputs feature is available only in latest large language models, starting with GPT-4o. +/// More information here: . +/// +/// +/// Some keywords from JSON Schema are not supported in OpenAI Structured Outputs yet. For example, "format" keyword for strings is not supported. +/// It means that properties with types , , , , +/// , are not supported. +/// This information should be taken into consideration during response format type design. +/// More information here: . +/// +public class OpenAI_StructuredOutputs(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This method shows how to enable Structured Outputs feature with object by providing + /// JSON schema of desired response format. + /// + [Fact] + public async Task StructuredOutputsWithChatResponseFormatAsync() + { + // Initialize kernel. + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Initialize ChatResponseFormat object with JSON schema of desired response format. 
+ ChatResponseFormat chatResponseFormat = ChatResponseFormat.CreateJsonSchemaFormat( + jsonSchemaFormatName: "movie_result", + jsonSchema: BinaryData.FromString(""" + { + "type": "object", + "properties": { + "Movies": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Title": { "type": "string" }, + "Director": { "type": "string" }, + "ReleaseYear": { "type": "integer" }, + "Rating": { "type": "number" }, + "IsAvailableOnStreaming": { "type": "boolean" }, + "Tags": { "type": "array", "items": { "type": "string" } } + }, + "required": ["Title", "Director", "ReleaseYear", "Rating", "IsAvailableOnStreaming", "Tags"], + "additionalProperties": false + } + } + }, + "required": ["Movies"], + "additionalProperties": false + } + """), + jsonSchemaIsStrict: true); + + // Specify response format by setting ChatResponseFormat object in prompt execution settings. + var executionSettings = new OpenAIPromptExecutionSettings + { + ResponseFormat = chatResponseFormat + }; + + // Send a request and pass prompt execution settings with desired response format. + var result = await kernel.InvokePromptAsync("What are the top 10 movies of all time?", new(executionSettings)); + + // Deserialize string response to a strong type to access type properties. + // At this point, the deserialization logic won't fail, because MovieResult type was described using JSON schema. + // This ensures that response string is a serialized version of MovieResult type. + var movieResult = JsonSerializer.Deserialize(result.ToString())!; + + // Output the result. + this.OutputResult(movieResult); + + // Output: + + // Title: The Lord of the Rings: The Fellowship of the Ring + // Director: Peter Jackson + // Release year: 2001 + // Rating: 8.8 + // Is available on streaming: True + // Tags: Adventure,Drama,Fantasy + + // ...and more... + } + + /// + /// This method shows how to enable Structured Outputs feature with object by providing + /// the type of desired response format. 
In this scenario, JSON schema will be created automatically based on provided type. + /// + [Fact] + public async Task StructuredOutputsWithTypeInExecutionSettingsAsync() + { + // Initialize kernel. + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Specify response format by setting Type object in prompt execution settings. + var executionSettings = new OpenAIPromptExecutionSettings + { + ResponseFormat = typeof(MovieResult) + }; + + // Send a request and pass prompt execution settings with desired response format. + var result = await kernel.InvokePromptAsync("What are the top 10 movies of all time?", new(executionSettings)); + + // Deserialize string response to a strong type to access type properties. + // At this point, the deserialization logic won't fail, because MovieResult type was specified as desired response format. + // This ensures that response string is a serialized version of MovieResult type. + var movieResult = JsonSerializer.Deserialize(result.ToString())!; + + // Output the result. + this.OutputResult(movieResult); + + // Output: + + // Title: The Lord of the Rings: The Fellowship of the Ring + // Director: Peter Jackson + // Release year: 2001 + // Rating: 8.8 + // Is available on streaming: True + // Tags: Adventure,Drama,Fantasy + + // ...and more... + } + + /// + /// This method shows how to use Structured Outputs feature in combination with Function Calling. + /// function returns a of email bodies. + /// As for final result, the desired response format should be , which contains additional property. + /// This shows how the data can be transformed with AI using strong types without additional instructions in the prompt. + /// + [Fact] + public async Task StructuredOutputsWithFunctionCallingAsync() + { + // Initialize kernel. 
+ Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: "gpt-4o-2024-08-06", + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + kernel.ImportPluginFromType(); + + // Specify response format by setting Type object in prompt execution settings and enable automatic function calling. + var executionSettings = new OpenAIPromptExecutionSettings + { + ResponseFormat = typeof(EmailResult), + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Send a request and pass prompt execution settings with desired response format. + var result = await kernel.InvokePromptAsync("Process the emails.", new(executionSettings)); + + // Deserialize string response to a strong type to access type properties. + // At this point, the deserialization logic won't fail, because EmailResult type was specified as desired response format. + // This ensures that response string is a serialized version of EmailResult type. + var emailResult = JsonSerializer.Deserialize(result.ToString())!; + + // Output the result. + this.OutputResult(emailResult); + + // Output: + + // Email #1 + // Body: Let's catch up over coffee this Saturday. It's been too long! + // Category: Social + + // Email #2 + // Body: Please review the attached document and provide your feedback by EOD. + // Category: Work + + // ...and more... + } + + #region private + + /// Movie result struct that will be used as desired chat completion response format (structured output). + private struct MovieResult + { + public List Movies { get; set; } + } + + /// Movie struct that will be used as desired chat completion response format (structured output). 
+ private struct Movie + { + public string Title { get; set; } + + public string Director { get; set; } + + public int ReleaseYear { get; set; } + + public double Rating { get; set; } + + public bool IsAvailableOnStreaming { get; set; } + + public List Tags { get; set; } + } + + private sealed class EmailResult + { + public List Emails { get; set; } + } + + private sealed class Email + { + public string Body { get; set; } + + public string Category { get; set; } + } + + /// Plugin to simulate RAG scenario and return collection of data. + private sealed class EmailPlugin + { + /// Function to simulate RAG scenario and return collection of data. + [KernelFunction] + private List GetEmails() + { + return + [ + "Hey, just checking in to see how you're doing!", + "Can you pick up some groceries on your way back home? We need milk and bread.", + "Happy Birthday! Wishing you a fantastic day filled with love and joy.", + "Let's catch up over coffee this Saturday. It's been too long!", + "Please review the attached document and provide your feedback by EOD.", + ]; + } + } + + /// Helper method to output object content. + private void OutputResult(MovieResult movieResult) + { + for (var i = 0; i < movieResult.Movies.Count; i++) + { + var movie = movieResult.Movies[i]; + + this.Output.WriteLine($"Movie #{i + 1}"); + this.Output.WriteLine($"Title: {movie.Title}"); + this.Output.WriteLine($"Director: {movie.Director}"); + this.Output.WriteLine($"Release year: {movie.ReleaseYear}"); + this.Output.WriteLine($"Rating: {movie.Rating}"); + this.Output.WriteLine($"Is available on streaming: {movie.IsAvailableOnStreaming}"); + this.Output.WriteLine($"Tags: {string.Join(",", movie.Tags)}"); + } + } + + /// Helper method to output object content. 
+ private void OutputResult(EmailResult emailResult) + { + for (var i = 0; i < emailResult.Emails.Count; i++) + { + var email = emailResult.Emails[i]; + + this.Output.WriteLine($"Email #{i + 1}"); + this.Output.WriteLine($"Body: {email.Body}"); + this.Output.WriteLine($"Category: {email.Category}"); + } + } + + #endregion +} diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj index 89cc2c897d61..ae7e54f68530 100644 --- a/dotnet/samples/Concepts/Concepts.csproj +++ b/dotnet/samples/Concepts/Concepts.csproj @@ -8,7 +8,7 @@ false true - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001,CA1724 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 @@ -41,10 +41,15 @@ - + + + true + + + @@ -63,8 +68,9 @@ + + - @@ -100,13 +106,16 @@ - + Always - + - + Always - + + + Always + diff --git a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs index 4c6e38452fc6..21abae070cf0 100644 --- a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs +++ b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs @@ -14,7 +14,7 @@ public async Task RunAsync() { ServiceCollection collection = new(); collection.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information)); - collection.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + collection.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); collection.AddSingleton(); // Registering class that uses Kernel to execute a plugin diff --git 
a/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs b/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs index 1e56b8f36878..3a6ff4b9617a 100644 --- a/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs +++ b/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs @@ -29,7 +29,7 @@ public async Task AutoFunctionInvocationFilterAsync() var executionSettings = new OpenAIPromptExecutionSettings { - ToolCallBehavior = ToolCallBehavior.RequireFunction(function.Metadata.ToOpenAIFunction(), autoInvoke: true) + FunctionChoiceBehavior = FunctionChoiceBehavior.Required([function], autoInvoke: true) }; var result = await kernel.InvokePromptAsync("Invoke provided function and return result", new(executionSettings)); @@ -76,7 +76,7 @@ public async Task GetFunctionCallsWithFilterAsync() var executionSettings = new OpenAIPromptExecutionSettings { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; await foreach (var chunk in kernel.InvokePromptStreamingAsync("Check current UTC time and return current weather in Boston city.", new(executionSettings))) diff --git a/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs b/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs index a2edd8948e51..6823f6c14820 100644 --- a/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs +++ b/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs @@ -57,7 +57,7 @@ public async Task LoggingAsync() // Enable automatic function calling. 
var executionSettings = new OpenAIPromptExecutionSettings { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions, + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(), ModelId = "gpt-4" }; diff --git a/dotnet/samples/Concepts/FunctionCalling/ContextDependentAdvertising.cs b/dotnet/samples/Concepts/FunctionCalling/ContextDependentAdvertising.cs new file mode 100644 index 000000000000..7d7adc2e5f7d --- /dev/null +++ b/dotnet/samples/Concepts/FunctionCalling/ContextDependentAdvertising.cs @@ -0,0 +1,104 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace FunctionCalling; + +/// +/// These samples demonstrate how to advertise functions to AI model based on a context. +/// +public class ContextDependentAdvertising(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This sample demonstrates how to advertise functions to AI model based on the context of the chat history. + /// It advertises functions to the AI model based on the game state. + /// For example, if the maze has not been created, advertise the create maze function only to prevent the AI model + /// from adding traps or treasures to the maze before it is created. + /// + [Fact] + public async Task AdvertiseFunctionsDependingOnContextPerUserInteractionAsync() + { + Kernel kernel = CreateKernel(); + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + // Tracking number of iterations to avoid infinite loop. + int maxIteration = 10; + int iteration = 0; + + // Define the functions for AI model to call. 
+ var gameUtils = kernel.ImportPluginFromType(); + KernelFunction createMaze = gameUtils["CreateMaze"]; + KernelFunction addTraps = gameUtils["AddTrapsToMaze"]; + KernelFunction addTreasures = gameUtils["AddTreasuresToMaze"]; + KernelFunction playGame = gameUtils["PlayGame"]; + + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("I would like to play a maze game with a lot of tricky traps and shiny treasures."); + + // Loop until the game has started or the max iteration is reached. + while (!chatHistory.Any(item => item.Content?.Contains("Game started.") ?? false) && iteration < maxIteration) + { + List functionsToAdvertise = new(); + + // Decide game state based on chat history. + bool mazeCreated = chatHistory.Any(item => item.Content?.Contains("Maze created.") ?? false); + bool trapsAdded = chatHistory.Any(item => item.Content?.Contains("Traps added to the maze.") ?? false); + bool treasuresAdded = chatHistory.Any(item => item.Content?.Contains("Treasures added to the maze.") ?? false); + + // The maze has not been created yet so advertise the create maze function. + if (!mazeCreated) + { + functionsToAdvertise.Add(createMaze); + } + // The maze has been created so advertise the adding traps and treasures functions. + else if (mazeCreated && (!trapsAdded || !treasuresAdded)) + { + functionsToAdvertise.Add(addTraps); + functionsToAdvertise.Add(addTreasures); + } + // Both traps and treasures have been added so advertise the play game function. + else if (treasuresAdded && trapsAdded) + { + functionsToAdvertise.Add(playGame); + } + + // Provide the functions to the AI model. + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(functionsToAdvertise) }; + + // Prompt the AI model. 
+ ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + Console.WriteLine(result); + + iteration++; + } + } + + private static Kernel CreateKernel() + { + // Create kernel + IKernelBuilder builder = Kernel.CreateBuilder(); + + builder.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + + return builder.Build(); + } + + private sealed class GameUtils + { + [KernelFunction] + public static string CreateMaze() => "Maze created."; + + [KernelFunction] + public static string AddTrapsToMaze() => "Traps added to the maze."; + + [KernelFunction] + public static string AddTreasuresToMaze() => "Treasures added to the maze."; + + [KernelFunction] + public static string PlayGame() => "Game started."; + } +} diff --git a/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs b/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs new file mode 100644 index 000000000000..98862fd4a820 --- /dev/null +++ b/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs @@ -0,0 +1,458 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace FunctionCalling; + +/// +/// These examples demonstrate how to enable and configure various aspects of function calling model in SK using the different function choice behaviors: +/// , , and . +/// The behaviors define the following aspect of function calling model: +/// 1. Function advertising - the list of functions to provide to the AI model. All three can advertise all kernel functions or a specified subset of them. +/// 2. Function calling behavior - whether the AI model automatically selects functions to call, is forced to call provided functions, or has to describe which functions it would call without calling them to complete the prompt. +/// 3. 
Function invocation - whether functions are invoked automatically by SK or manually by a caller and whether they are invoked sequentially or concurrently(not supported in auto-invocation mode yet) +/// +/// ** Function advertising ** +/// All three behaviors have the `functions` parameter of type . By default, it is null, +/// which means all kernel functions are provided or advertised to the AI model. If a list of functions is provided, +/// only those functions are advertised to the AI model. An empty list means no functions are provided to the AI model, +/// which is equivalent to disabling function calling. +/// +/// ** Function calling behavior ** +/// The behavior allows the model to decide whether to call the functions and, if so, which ones to call. +/// The behavior forces the model to call the provided functions. The behavior advertises functions in the first +/// request to the AI model only and stops advertising them in subsequent requests to prevent an infinite loop where the model keeps calling functions repeatedly. +/// The behavior tells the AI model to use the provided functions without calling them to generate a response. +/// This behavior is useful for dry runs when you want to see which functions the model would call without actually invoking them. +/// +/// ** Function invocation ** +/// The and supports two modes of function invocation: manual and automatic: +/// * Automatic function invocation mode causes all functions chosen by the AI model to be automatically invoked by SK. +/// The results of these function invocations are added to the chat history and sent to the model automatically in the following request. +/// The model then reasons about the chat history and then calls functions again or generates the final response. +/// This approach is fully automated and requires no manual intervention from the caller. The automatic invocation mode is enabled by default. 
+/// * Manual invocation mode returns all function calls requested by the AI model to the SK caller. The caller is fully responsible +/// for the invocation phase where they may decide which function to call, how to handle exceptions, call them in parallel or sequentially, etc. +/// The caller then adds the function results/exceptions to the chat history and returns it to the model, which reasons about it +/// and then calls functions again or generates the final response. This invocation mode provides more control over the function invocation phase to the caller. +/// To enable manual invocation, the caller needs to set the `autoInvoke` parameter to `false` when specifying either +/// or in the . +/// +/// SK supports only sequential invocation of functions in the automatic invocation mode at the moment. To invoke functions concurrently, a caller will need to do this manually. +/// +public class FunctionCalling(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This example demonstrates usage of that advertises all kernel functions to the AI model and invokes them automatically. + /// + [Fact] + public async Task RunPromptWithAutoFunctionChoiceBehaviorAdvertisingAllKernelFunctionsInvokedAutomaticallyAsync() + { + Kernel kernel = CreateKernel(); + + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + Console.WriteLine(await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))); + + // Expected output: "Boston is currently experiencing a rainy day, hence, the likely color of the sky in Boston is grey." + } + + /// + /// This example demonstrates usage of that advertises only one function to the AI model and invokes it automatically. 
+ /// + [Fact] + public async Task RunPromptWithRequiredFunctionChoiceBehaviorAdvertisingOneFunctionInvokedAutomaticallyAsync() + { + Kernel kernel = CreateKernel(); + + KernelFunction getWeatherFunction = kernel.Plugins.GetFunction("HelperFunctions", "GetWeatherForCity"); + + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(functions: [getWeatherFunction]) }; + + Console.WriteLine(await kernel.InvokePromptAsync("Given that it is now the 9th of September 2024, 11:29 AM, what is the likely color of the sky in Boston?", new(settings))); + + // Expected output: "The sky in Boston is likely to be grey due to the rain." + } + + /// + /// This example demonstrates usage of that advertises all kernel functions to the AI model. + /// + [Fact] + public async Task RunPromptWithNoneFunctionChoiceBehaviorAdvertisingAllKernelFunctionsAsync() + { + Kernel kernel = CreateKernel(); + + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + Console.WriteLine(await kernel.InvokePromptAsync("Tell me which provided functions I would need to call to get the color of the sky in Boston on a specified date.", new(settings))); + + // Expected output: "You would first call the `HelperFunctions-GetCurrentUtcDateTime` function to get the current date time in UTC. Then, you would use the `HelperFunctions-GetWeatherForCity` function, + // passing in the city name as 'Boston' and the retrieved UTC date time. Note, however, that these functions won't directly tell you the color of the sky. + // The `GetWeatherForCity` function would provide weather data, and you may infer the general sky condition (e.g., clear, cloudy, rainy) based on this data, but it would not specify the color of the sky." + } + + /// + /// This example demonstrates usage of in YAML prompt template config that advertises all kernel functions to the AI model and invokes them automatically. 
+ /// + [Fact] + public async Task RunPromptTemplateConfigWithAutoFunctionChoiceBehaviorAdvertisingAllKernelFunctionsInvokedAutomaticallyAsync() + { + Kernel kernel = CreateKernel(); + + // The `function_choice_behavior.functions` property is omitted which is equivalent to providing all kernel functions to the AI model. + string promptTemplateConfig = """ + template_format: semantic-kernel + template: Given the current time of day and weather, what is the likely color of the sky in Boston? + execution_settings: + default: + function_choice_behavior: + type: auto + """; + + KernelFunction promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplateConfig); + + Console.WriteLine(await kernel.InvokeAsync(promptFunction)); + + // Expected output: "Given that it's currently raining in Boston, the sky is likely to be gray." + } + + /// + /// This example demonstrates usage of in YAML prompt template config that advertises one kernel function to the AI model and invokes it automatically. + /// + [Fact] + public async Task RunPromptTemplateConfigWithAutoFunctionChoiceBehaviorAdvertisingOneFunctionInvokedAutomaticallyAsync() + { + Kernel kernel = CreateKernel(); + + // Only the `HelperFunctions.GetWeatherForCity` function which is added to the `function_choice_behavior.functions` list, is advertised to the AI model. + string promptTemplateConfig = """ + template_format: semantic-kernel + template: Given that it is now the 9th of September 2024, 11:29 AM, what is the likely color of the sky in Boston? + execution_settings: + default: + function_choice_behavior: + type: auto + functions: + - HelperFunctions.GetWeatherForCity + """; + + KernelFunction promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplateConfig); + + Console.WriteLine(await kernel.InvokeAsync(promptFunction)); + + // Expected output: "The color of the sky in Boston is likely to be grey due to the rain." 
+ } + + [Fact] + /// + /// This example demonstrates usage of the non-streaming chat completion API with that advertises all kernel functions to the AI model and invokes them automatically. + /// + public async Task RunNonStreamingChatCompletionApiWithAutomaticFunctionInvocationAsync() + { + Kernel kernel = CreateKernel(); + + // To enable automatic function invocation, set the `autoInvoke` parameter to `true` in the line below or omit it as it is `true` by default. + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync( + "Given the current time of day and weather, what is the likely color of the sky in Boston?", + settings, + kernel); + + // Assert + Console.WriteLine(result); + + // Expected output: "The likely color of the sky in Boston is gray due to the current rainy weather." + } + + [Fact] + /// + /// This example demonstrates the usage of the streaming chat completion API with that advertises all kernel functions to the AI model and invokes them automatically. + /// + public async Task RunStreamingChatCompletionApiWithAutomaticFunctionInvocationAsync() + { + Kernel kernel = CreateKernel(); + + // To enable automatic function invocation, set the `autoInvoke` parameter to `true` in the line below or omit it as it is `true` by default. 
+ OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in chatCompletionService.GetStreamingChatMessageContentsAsync( + "Given the current time of day and weather, what is the likely color of the sky in Boston?", + settings, + kernel)) + { + stringBuilder.Append(update.Content); + } + + // Assert + Console.WriteLine(stringBuilder.ToString()); + + // Expected output: "Given that it's currently daytime and rainy in Boston, the sky is likely to be grey or overcast." + } + + /// + /// This example demonstrates the usage of the non-streaming chat completion API with that advertises all kernel functions to the AI model and invokes them manually. + /// + [Fact] + public async Task RunNonStreamingChatCompletionApiWithManualFunctionInvocationAsync() + { + Kernel kernel = CreateKernel(); + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + // To enable manual function invocation, set the `autoInvoke` parameter to `false`. + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = Microsoft.SemanticKernel.FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + while (true) + { + // Start or continue chat based on the chat history + ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + if (result.Content is not null) + { + Console.Write(result.Content); + // Expected output: "The color of the sky in Boston is likely to be gray due to the rainy weather." + } + + // Get function calls from the chat message content and quit the chat loop if no function calls are found. 
+ IEnumerable functionCalls = FunctionCallContent.GetFunctionCalls(result); + if (!functionCalls.Any()) + { + break; + } + + // Preserving the original chat message content with function calls in the chat history. + chatHistory.Add(result); + + // Iterating over the requested function calls and invoking them sequentially. + // The code can easily be modified to invoke functions in concurrently if needed. + foreach (FunctionCallContent functionCall in functionCalls) + { + try + { + // Invoking the function + FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); + + // Adding the function result to the chat history + chatHistory.Add(resultContent.ToChatMessage()); + } + catch (Exception ex) + { + // Adding function exception to the chat history. + chatHistory.Add(new FunctionResultContent(functionCall, ex).ToChatMessage()); + // or + //chatHistory.Add(new FunctionResultContent(functionCall, "Error details that the AI model can reason about.").ToChatMessage()); + } + } + + Console.WriteLine(); + } + } + + /// + /// This example demonstrates the usage of the streaming chat completion API with that advertises all kernel functions to the AI model and invokes them manually. + /// + [Fact] + public async Task RunStreamingChatCompletionApiWithManualFunctionCallingAsync() + { + Kernel kernel = CreateKernel(); + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + // To enable manual function invocation, set the `autoInvoke` parameter to `false`. + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = Microsoft.SemanticKernel.FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + // Create chat history with the initial user message + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + while (true) + { + AuthorRole? 
authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + + // Start or continue streaming chat based on the chat history + await foreach (var streamingContent in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + if (streamingContent.Content is not null) + { + Console.Write(streamingContent.Content); + // Streamed output: "The color of the sky in Boston is likely to be gray due to the rainy weather." + } + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + // Build the function calls from the streaming content and quit the chat loop if no function calls are found + var functionCalls = fccBuilder.Build(); + if (!functionCalls.Any()) + { + break; + } + + // Creating and adding chat message content to preserve the original function calls in the chat history. + // The function calls are added to the chat message a few lines below. + var fcContent = new ChatMessageContent(role: authorRole ?? default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them. + // The code can easily be modified to invoke functions concurrently if needed. + foreach (var functionCall in functionCalls) + { + // Adding the original function call to the chat message content + fcContent.Items.Add(functionCall); + + // Invoking the function + var functionResult = await functionCall.InvokeAsync(kernel); + + // Adding the function result to the chat history + chatHistory.Add(functionResult.ToChatMessage()); + } + + Console.WriteLine(); + } + } + + /// + /// This example demonstrates how a simulated function can be added to the chat history in a manual function invocation mode. + /// + /// + /// Simulated functions are not called or requested by the AI model but are added to the chat history by the caller. + /// They provide a way for callers to add additional information that, if provided via the prompt, would be ignored due to the model training.
+ /// + [Fact] + public async Task RunNonStreamingPromptWithSimulatedFunctionAsync() + { + Kernel kernel = CreateKernel(); + + IChatCompletionService chatCompletionService = kernel.GetRequiredService(); + + // Enabling manual function invocation + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = Microsoft.SemanticKernel.FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + ChatHistory chatHistory = []; + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + while (true) + { + ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + if (result.Content is not null) + { + Console.Write(result.Content); + // Expected output: "Considering the current weather conditions in Boston with a tornado watch in effect resulting in potential severe thunderstorms, + // the sky color is likely unusual such as green, yellow, or dark gray. Please stay safe and follow instructions from local authorities." + } + + chatHistory.Add(result); // Adding AI model response containing function calls(requests) to chat history as it's required by the models. + + IEnumerable functionCalls = FunctionCallContent.GetFunctionCalls(result); + if (!functionCalls.Any()) + { + break; + } + + foreach (FunctionCallContent functionCall in functionCalls) + { + FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); // Invoking each function. + + chatHistory.Add(resultContent.ToChatMessage()); + } + + // Adding a simulated function call to the connector response message + FunctionCallContent simulatedFunctionCall = new("weather-alert", id: "call_123"); + result.Items.Add(simulatedFunctionCall); + + // Adding a simulated function result to chat history + string simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. 
Stay informed and follow safety instructions from authorities."; + chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); + + Console.WriteLine(); + } + } + + /// + /// This example demonstrates how to disable function calling. + /// + [Fact] + public async Task DisableFunctionCallingAsync() + { + Kernel kernel = CreateKernel(); + + // Supplying an empty list to the `functions` parameter disables function calling. + // Alternatively, either omit assigning anything to the `FunctionChoiceBehavior` property or assign null to it to also disable function calling. + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(functions: []) }; + + Console.WriteLine(await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))); + + // Expected output: "Sorry, I cannot answer this question as it requires real-time information which I, as a text-based model, cannot access." + } + + /// + /// This example demonstrates how to disable function calling in the YAML prompt template config. + /// + [Fact] + public async Task DisableFunctionCallingInPromptTemplateConfigAsync() + { + Kernel kernel = CreateKernel(); + + // The `function_choice_behavior.functions` property is an empty list which disables function calling. + // Alternatively, you can omit the `function_choice_behavior` property to disable function calling. + string promptTemplateConfig = """ + template_format: semantic-kernel + template: Given that it is now the 9th of September 2024, 11:29 AM, what is the likely color of the sky in Boston? 
+ execution_settings: + default: + function_choice_behavior: + type: auto + functions: [] + """; + + KernelFunction promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplateConfig); + + Console.WriteLine(await kernel.InvokeAsync(promptFunction)); + + // Expected output: "As an AI, I don't have real-time data or live feed to provide current weather conditions or the color of the sky." + } + + private static Kernel CreateKernel() + { + // Create kernel + IKernelBuilder builder = Kernel.CreateBuilder(); + + builder.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); + + Kernel kernel = builder.Build(); + + // Add a plugin with some helper functions we want to allow the model to call. + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcDateTime", "Retrieves the current date time in UTC."), + kernel.CreateFunctionFromMethod((string cityName, string currentDateTime) => + cityName switch + { + "Boston" => "61 and rainy", + "London" => "55 and cloudy", + "Miami" => "80 and sunny", + "Paris" => "60 and rainy", + "Tokyo" => "50 and sunny", + "Sydney" => "75 and sunny", + "Tel Aviv" => "80 and sunny", + _ => "31 and snowing", + }, "GetWeatherForCity", "Gets the current weather for the specified city"), + ]); + + return kernel; + } +} diff --git a/dotnet/samples/Concepts/FunctionCalling/MultipleFunctionsVsParameters.cs b/dotnet/samples/Concepts/FunctionCalling/MultipleFunctionsVsParameters.cs new file mode 100644 index 000000000000..9e023d281745 --- /dev/null +++ b/dotnet/samples/Concepts/FunctionCalling/MultipleFunctionsVsParameters.cs @@ -0,0 +1,213 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ComponentModel; +using System.Text.Json; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace FunctionCalling; + +/// +/// This sample shows different options for calling functions with multiple parameters. +/// The scenario is to search for invoices by customer name, purchase order, or vendor number. +/// +/// The first sample uses multiple functions, one for each search criterion. One issue is that +/// as the number of functions increases then the reliability of the AI model to select the correct +/// function may decrease. To help avoid this issue, you can try filtering which functions are advertised +/// to the AI model e.g. if your application has some context information which indicates a purchase order +/// is available then you can filter out the customer name and vendor number functions. +/// +/// The second sample uses a single function that takes an object with all search criteria. In this case some +/// of the search criteria are optional. Again as the number of parameters increases then the reliability of the +/// AI model may decrease. One advantage of this approach is that if the AI model can extract multiple search criteria +/// for the user's ask then your plugin can use this information to provide more reliable results. +/// +/// For both options care should be taken to validate the parameters that the AI model provides. E.g. the customer +/// name could be wrong or the purchase order could be invalid. It is worth catching these errors and responding to the +/// AI model with a message that explains what has gone wrong to see how it responds. It may be able to retry the search +/// and get a successful response on the second attempt. Or it may decide to revert back to the human in the loop to ask +/// for more information.
+/// +public class MultipleFunctionsVsParameters(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Shows how to use multiple Search By functions to search for invoices by customer name, purchase order, or vendor number. + /// + [Fact] + public async Task InvoiceSearchBySampleAsync() + { + // Create a kernel with OpenAI chat completion + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.Services.AddSingleton( + new AutoFunctionInvocationFilter(this.Output)); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Plugins.AddFromType(); + Kernel kernel = kernelBuilder.Build(); + + await InvokePromptsAsync(kernel); + } + + /// + /// Shows how to use a single Search function to search for invoices by customer name, purchase order, or vendor number. + /// + [Fact] + public async Task InvoiceSearchSampleAsync() + { + // Create a kernel with OpenAI chat completion + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + kernelBuilder.Services.AddSingleton( + new AutoFunctionInvocationFilter(this.Output)); + kernelBuilder.AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Plugins.AddFromType(); + Kernel kernel = kernelBuilder.Build(); + + await InvokePromptsAsync(kernel); + } + + /// Invoke the various prompts we want to test. 
+ private async Task InvokePromptsAsync(Kernel kernel) + { + OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + Console.WriteLine("Prompt: Show me the invoices for customer named Contoso Industries."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for customer named Contoso Industries.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Show me the invoices for purchase order PO123."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for purchase order PO123.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Show me the invoices for vendor number VN123."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for vendor number VN123.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Show me the invoices for Contoso Industries."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for Contoso Industries.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Show me the invoices for PO123."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for PO123.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Show me the invoices for VN123."); + Console.WriteLine(await kernel.InvokePromptAsync("Show me the invoices for VN123.", new(settings))); + Console.WriteLine("----------------------------------------------------"); + Console.WriteLine("Prompt: Zeigen Sie mir die Rechnungen fรผr Contoso Industries."); + Console.WriteLine(await kernel.InvokePromptAsync("Zeigen Sie mir die Rechnungen fรผr Contoso Industries.", new(settings))); + 
Console.WriteLine("----------------------------------------------------"); + } + + /// Shows available syntax for auto function invocation filter. + private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter + { + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + var functionName = context.Function.Name; + var arguments = context.Arguments; + + // Output the details of the function being called + output.WriteLine($"Function: {functionName} {JsonSerializer.Serialize(arguments)}"); + + // Calling next filter in pipeline or function itself. + await next(context); + } + } + + /// + /// A plugin that provides methods to search for Invoices using different criteria. + /// + private sealed class InvoiceSearchBy + { + [KernelFunction] + [Description("Search for invoices by customer name.")] + public IEnumerable SearchByCustomerName([Description("The customer name.")] string customerName) + { + return + [ + new Invoice { CustomerName = customerName, PurchaseOrder = "PO123", VendorNumber = "VN123" }, + new Invoice { CustomerName = customerName, PurchaseOrder = "PO124", VendorNumber = "VN124" }, + new Invoice { CustomerName = customerName, PurchaseOrder = "PO125", VendorNumber = "VN125" }, + ]; + } + + [KernelFunction] + [Description("Search for invoices by purchase order.")] + public IEnumerable SearchByPurchaseOrder([Description("The purchase order. 
Purchase orders begin with a PO prefix.")] string purchaseOrder) + { + return + [ + new Invoice { CustomerName = "Customer1", PurchaseOrder = purchaseOrder, VendorNumber = "VN123" }, + new Invoice { CustomerName = "Customer2", PurchaseOrder = purchaseOrder, VendorNumber = "VN124" }, + new Invoice { CustomerName = "Customer3", PurchaseOrder = purchaseOrder, VendorNumber = "VN125" }, + ]; + } + + [KernelFunction] + [Description("Search for invoices by vendor number")] + public IEnumerable SearchByVendorNumber([Description("The vendor number. Vendor numbers begin with a VN prefix.")] string vendorNumber) + { + return + [ + new Invoice { CustomerName = "Customer1", PurchaseOrder = "PO123", VendorNumber = vendorNumber }, + new Invoice { CustomerName = "Customer2", PurchaseOrder = "PO124", VendorNumber = vendorNumber }, + new Invoice { CustomerName = "Customer3", PurchaseOrder = "PO125", VendorNumber = vendorNumber }, + ]; + } + } + + /// + /// A plugin that provides methods to search for Invoices using different criteria. + /// + private sealed class InvoiceSearch + { + [KernelFunction] + [Description("Search for invoices by customer name or purchase order or vendor number.")] + public IEnumerable Search([Description("The invoice search request. It must contain either a customer name or a purchase order or a vendor number")] InvoiceSearchRequest searchRequest) + { + return + [ + new Invoice + { + CustomerName = searchRequest.CustomerName ?? "Customer1", + PurchaseOrder = searchRequest.PurchaseOrder ?? "PO123", + VendorNumber = searchRequest.VendorNumber ?? "VN123" + }, + new Invoice + { + CustomerName = searchRequest.CustomerName ?? "Customer2", + PurchaseOrder = searchRequest.PurchaseOrder ?? "PO124", + VendorNumber = searchRequest.VendorNumber ?? "VN124" + }, + new Invoice + { + CustomerName = searchRequest.CustomerName ?? "Customer3", + PurchaseOrder = searchRequest.PurchaseOrder ?? "PO125", + VendorNumber = searchRequest.VendorNumber ??
"VN125" + }, + ]; + } + } + + /// + /// Represents an invoice. + /// + private sealed class Invoice + { + public string CustomerName { get; set; } + public string PurchaseOrder { get; set; } + public string VendorNumber { get; set; } + } + + /// + /// Represents an invoice search request. + /// + [Description("The invoice search request.")] + private sealed class InvoiceSearchRequest + { + [Description("Optional, customer name.")] + public string? CustomerName { get; set; } + [Description("Optional, purchase order. Purchase orders begin with a PO prefix.")] + public string? PurchaseOrder { get; set; } + [Description("Optional, vendor number. Vendor numbers begin with a VN prefix.")] + public string? VendorNumber { get; set; } + } +} diff --git a/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs b/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs deleted file mode 100644 index 1b817fbc60fe..000000000000 --- a/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs +++ /dev/null @@ -1,325 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Text; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; - -namespace FunctionCalling; - -/// -/// These examples demonstrate two ways functions called by the OpenAI LLM can be invoked using the SK streaming and non-streaming AI API: -/// -/// 1. Automatic Invocation by SK: -/// Functions called by the LLM are invoked automatically by SK. The results of these function invocations -/// are automatically added to the chat history and returned to the LLM. The LLM reasons about the chat history -/// and generates the final response. -/// This approach is fully automated and requires no manual intervention from the caller. -/// -/// 2.
Manual Invocation by a Caller: -/// Functions called by the LLM are returned to the AI API caller. The caller controls the invocation phase where -/// they may decide which function to call, when to call them, how to handle exceptions, call them in parallel or sequentially, etc. -/// The caller then adds the function results or exceptions to the chat history and returns it to the LLM, which reasons about it -/// and generates the final response. -/// This approach is manual and provides more control over the function invocation phase to the caller. -/// -public class OpenAI_FunctionCalling(ITestOutputHelper output) : BaseTest(output) -{ - /// - /// This example demonstrates auto function calling with a non-streaming prompt. - /// - [Fact] - public async Task RunNonStreamingPromptWithAutoFunctionCallingAsync() - { - Console.WriteLine("Auto function calling with a non-streaming prompt."); - - Kernel kernel = CreateKernel(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - - Console.WriteLine(await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))); - } - - /// - /// This example demonstrates auto function calling with a streaming prompt. - /// - [Fact] - public async Task RunStreamingPromptAutoFunctionCallingAsync() - { - Console.WriteLine("Auto function calling with a streaming prompt."); - - Kernel kernel = CreateKernel(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - - await foreach (StreamingKernelContent update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) - { - Console.Write(update); - } - } - - /// - /// This example demonstrates manual function calling with a non-streaming chat API. 
- /// - [Fact] - public async Task RunNonStreamingChatAPIWithManualFunctionCallingAsync() - { - Console.WriteLine("Manual function calling with a non-streaming prompt."); - - // Create kernel and chat service - Kernel kernel = CreateKernel(); - - IChatCompletionService chat = kernel.GetRequiredService(); - - // Configure the chat service to enable manual function calling - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Create chat history with the initial user message - ChatHistory chatHistory = new(); - chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); - - while (true) - { - // Start or continue chat based on the chat history - ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel); - if (result.Content is not null) - { - Console.Write(result.Content); - } - - // Get function calls from the chat message content and quit the chat loop if no function calls are found. - IEnumerable functionCalls = FunctionCallContent.GetFunctionCalls(result); - if (!functionCalls.Any()) - { - break; - } - - // Preserving the original chat message content with function calls in the chat history. - chatHistory.Add(result); - - // Iterating over the requested function calls and invoking them - foreach (FunctionCallContent functionCall in functionCalls) - { - try - { - // Invoking the function - FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); - - // Adding the function result to the chat history - chatHistory.Add(resultContent.ToChatMessage()); - } - catch (Exception ex) - { - // Adding function exception to the chat history. 
- chatHistory.Add(new FunctionResultContent(functionCall, ex).ToChatMessage()); - // or - //chatHistory.Add(new FunctionResultContent(functionCall, "Error details that LLM can reason about.").ToChatMessage()); - } - } - - Console.WriteLine(); - } - } - - /// - /// This example demonstrates manual function calling with a streaming chat API. - /// - [Fact] - public async Task RunStreamingChatAPIWithManualFunctionCallingAsync() - { - Console.WriteLine("Manual function calling with a streaming prompt."); - - // Create kernel and chat service - Kernel kernel = CreateKernel(); - - IChatCompletionService chat = kernel.GetRequiredService(); - - // Configure the chat service to enable manual function calling - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Create chat history with the initial user message - ChatHistory chatHistory = new(); - chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); - - while (true) - { - AuthorRole? authorRole = null; - var fccBuilder = new FunctionCallContentBuilder(); - - // Start or continue streaming chat based on the chat history - await foreach (var streamingContent in chat.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) - { - if (streamingContent.Content is not null) - { - Console.Write(streamingContent.Content); - } - authorRole ??= streamingContent.Role; - fccBuilder.Append(streamingContent); - } - - // Build the function calls from the streaming content and quit the chat loop if no function calls are found - var functionCalls = fccBuilder.Build(); - if (!functionCalls.Any()) - { - break; - } - - // Creating and adding chat message content to preserve the original function calls in the chat history. - // The function calls are added to the chat message a few lines below. - var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); - chatHistory.Add(fcContent); - - // Iterating over the requested function calls and invoking them - foreach (var functionCall in functionCalls) - { - // Adding the original function call to the chat message content - fcContent.Items.Add(functionCall); - - // Invoking the function - var functionResult = await functionCall.InvokeAsync(kernel); - - // Adding the function result to the chat history - chatHistory.Add(functionResult.ToChatMessage()); - } - - Console.WriteLine(); - } - } - - /// - /// This example demonstrates how a simulated function can be added to the chat history using a manual function calling approach. - /// - /// - /// Simulated functions are not called or requested by the LLM but are added to the chat history by the caller. - /// Simulated functions provide a way for callers to add additional information that, if provided via the prompt, would be ignored due to LLM training. - /// - [Fact] - public async Task RunNonStreamingPromptWithSimulatedFunctionAsync() - { - Console.WriteLine("Simulated function calling with a non-streaming prompt."); - - Kernel kernel = CreateKernel(); - - IChatCompletionService chat = kernel.GetRequiredService(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - ChatHistory chatHistory = new(); - chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); - - while (true) - { - ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel); - if (result.Content is not null) - { - Console.Write(result.Content); - } - - chatHistory.Add(result); // Adding LLM response containing function calls(requests) to chat history as it's required by LLMs. 
- - IEnumerable functionCalls = FunctionCallContent.GetFunctionCalls(result); - if (!functionCalls.Any()) - { - break; - } - - foreach (FunctionCallContent functionCall in functionCalls) - { - FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); // Executing each function. - - chatHistory.Add(resultContent.ToChatMessage()); - } - - // Adding a simulated function call to the connector response message - FunctionCallContent simulatedFunctionCall = new("weather-alert", id: "call_123"); - result.Items.Add(simulatedFunctionCall); - - // Adding a simulated function result to chat history - string simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities."; - chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); - - Console.WriteLine(); - } - } - - /// - /// This example demonstrates a console chat with content streaming capabilities that uses auto function calling. - /// - [Fact] - public async Task RunStreamingChatWithAutoFunctionCallingAsync() - { - Console.WriteLine("Auto function calling with a streaming chat"); - - Kernel kernel = CreateKernel(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - IChatCompletionService chat = kernel.GetRequiredService(); - ChatHistory chatHistory = new(); - int iteration = 0; - - while (true) - { - Console.Write("Question (Type \"quit\" to leave): "); - - //string question = System.Console.ReadLine() ?? string.Empty; - - // Comment out this line and uncomment the one above to run in a console chat loop. - string question = iteration == 0 ? "Given the current time of day and weather, what is the likely color of the sky in Boston?" 
: "quit"; - - if (question == "quit") - { - break; - } - - chatHistory.AddUserMessage(question); - StringBuilder sb = new(); - await foreach (var update in chat.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) - { - if (update.Content is not null) - { - Console.Write(update.Content); - sb.Append(update.Content); - } - } - chatHistory.AddAssistantMessage(sb.ToString()); - Console.WriteLine(); - iteration++; - } - } - - private static Kernel CreateKernel() - { - // Create kernel - IKernelBuilder builder = Kernel.CreateBuilder(); - - // We recommend the usage of OpenAI latest models for the best experience with tool calling. - // i.e. gpt-3.5-turbo-1106 or gpt-4-1106-preview - builder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey); - - builder.Services.AddLogging(services => services.AddConsole().SetMinimumLevel(LogLevel.Trace)); - - Kernel kernel = builder.Build(); - - // Add a plugin with some helper functions we want to allow the model to utilize. 
- kernel.ImportPluginFromFunctions("HelperFunctions", - [ - kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), - kernel.CreateFunctionFromMethod((string cityName) => - cityName switch - { - "Boston" => "61 and rainy", - "London" => "55 and cloudy", - "Miami" => "80 and sunny", - "Paris" => "60 and rainy", - "Tokyo" => "50 and sunny", - "Sydney" => "75 and sunny", - "Tel Aviv" => "80 and sunny", - _ => "31 and snowing", - }, "GetWeatherForCity", "Gets the current weather for the specified city"), - ]); - - return kernel; - } -} diff --git a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs index 0b50562583ea..88a374b60bee 100644 --- a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs +++ b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs @@ -2,8 +2,8 @@ using System.Diagnostics; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; +using OpenAI.Chat; namespace Functions; @@ -79,12 +79,12 @@ public FunctionResultTestDataGen(FunctionResult functionResult, long executionTi private TokenCounts? ParseTokenCounts() { - CompletionsUsage? usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage; + var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage; return new TokenCounts( - completionTokens: usage?.CompletionTokens ?? 0, - promptTokens: usage?.PromptTokens ?? 0, - totalTokens: usage?.TotalTokens ?? 0); + completionTokens: usage?.OutputTokenCount ?? 0, + promptTokens: usage?.InputTokenCount ?? 0, + totalTokens: usage?.TotalTokenCount ?? 
0); } private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() diff --git a/dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs new file mode 100644 index 000000000000..5ba0a45440b2 --- /dev/null +++ b/dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Embeddings; +using xRetry; + +#pragma warning disable format // Format item can be simplified +#pragma warning disable CA1861 // Avoid constant arrays as arguments + +namespace Memory; + +// The following example shows how to use Semantic Kernel with Ollama API. +public class Ollama_EmbeddingGeneration(ITestOutputHelper output) : BaseTest(output) +{ + [RetryFact(typeof(HttpOperationException))] + public async Task RunEmbeddingAsync() + { + Assert.NotNull(TestConfiguration.Ollama.EmbeddingModelId); + + Console.WriteLine("\n======= Ollama - Embedding Example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextEmbeddingGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.EmbeddingModelId) + .Build(); + + var embeddingGenerator = kernel.GetRequiredService(); + + // Generate embeddings for each chunk. + var embeddings = await embeddingGenerator.GenerateEmbeddingsAsync(["John: Hello, how are you?\nRoger: Hey, I'm Roger!"]); + + Console.WriteLine($"Generated {embeddings.Count} embeddings for the provided text"); + } +} diff --git a/dotnet/samples/Concepts/Memory/Onnx_EmbeddingGeneration.cs b/dotnet/samples/Concepts/Memory/Onnx_EmbeddingGeneration.cs new file mode 100644 index 000000000000..8058349f7b0b --- /dev/null +++ b/dotnet/samples/Concepts/Memory/Onnx_EmbeddingGeneration.cs @@ -0,0 +1,82 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Embeddings; + +namespace Memory; + +// The following example shows how to use Semantic Kernel with Onnx GenAI API. +public class Onnx_EmbeddingGeneration(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Example using the service directly to get embeddings + /// + /// + /// Configuration example: + /// + /// + /// EmbeddingModelPath: + /// D:\huggingface\bge-micro-v2\onnx\model.onnx + /// + /// + /// EmbeddingVocabPath: + /// D:\huggingface\bge-micro-v2\vocab.txt + /// + /// + /// + [Fact] + public async Task RunEmbeddingAsync() + { + Assert.NotNull(TestConfiguration.Onnx.EmbeddingModelPath); // dotnet user-secrets set "Onnx:EmbeddingModelPath" "" + Assert.NotNull(TestConfiguration.Onnx.EmbeddingVocabPath); // dotnet user-secrets set "Onnx:EmbeddingVocabPath" "" + + Console.WriteLine("\n======= Onnx - Embedding Example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddBertOnnxTextEmbeddingGeneration(TestConfiguration.Onnx.EmbeddingModelPath, TestConfiguration.Onnx.EmbeddingVocabPath) + .Build(); + + var embeddingGenerator = kernel.GetRequiredService(); + + // Generate embeddings for each chunk. 
+ var embeddings = await embeddingGenerator.GenerateEmbeddingsAsync(["John: Hello, how are you?\nRoger: Hey, I'm Roger!"]); + + Console.WriteLine($"Generated {embeddings.Count} embeddings for the provided text"); + } + + /// + /// Example using the service collection directly to get embeddings + /// + /// + /// Configuration example: + /// + /// + /// EmbeddingModelPath: + /// D:\huggingface\bge-micro-v2\onnx\model.onnx + /// + /// + /// EmbeddingVocabPath: + /// D:\huggingface\bge-micro-v2\vocab.txt + /// + /// + /// + [Fact] + public async Task RunServiceCollectionEmbeddingAsync() + { + Assert.NotNull(TestConfiguration.Onnx.EmbeddingModelPath); // dotnet user-secrets set "Onnx:EmbeddingModelPath" "" + Assert.NotNull(TestConfiguration.Onnx.EmbeddingVocabPath); // dotnet user-secrets set "Onnx:EmbeddingVocabPath" "" + + Console.WriteLine("\n======= Onnx - Embedding Example ========\n"); + + var services = new ServiceCollection() + .AddBertOnnxTextEmbeddingGeneration(TestConfiguration.Onnx.EmbeddingModelPath, TestConfiguration.Onnx.EmbeddingVocabPath); + var provider = services.BuildServiceProvider(); + var embeddingGenerator = provider.GetRequiredService(); + + // Generate embeddings for each chunk. + var embeddings = await embeddingGenerator.GenerateEmbeddingsAsync(["John: Hello, how are you?\nRoger: Hey, I'm Roger!"]); + + Console.WriteLine($"Generated {embeddings.Count} embeddings for the provided text"); + } +} diff --git a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs index 04a74656e948..fb96579f32a1 100644 --- a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs +++ b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs @@ -1,7 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using Microsoft.ML.Tokenizers; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Text; namespace Memory; diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs index fbc313adebf4..883195b68df9 100644 --- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs +++ b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs @@ -4,7 +4,7 @@ using System.Text.Json; using System.Text.Unicode; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Memory; using Microsoft.SemanticKernel.Plugins.Memory; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs index db8e259f4e7a..cbfc5c1b0b24 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs @@ -3,7 +3,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using Memory.VectorStoreFixtures; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Redis; using Microsoft.SemanticKernel.Data; using Microsoft.SemanticKernel.Embeddings; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs index 18f0e5b476ca..6aa4d84cebab 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs @@ -4,7 +4,7 @@ using Memory.VectorStoreFixtures; 
using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Connectors.Redis; using Microsoft.SemanticKernel.Data; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs index 341e5c2bbda2..75013b8196ac 100644 --- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs +++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs @@ -2,7 +2,7 @@ using System.Text.Json; using Memory.VectorStoreFixtures; -using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.Qdrant; using Microsoft.SemanticKernel.Data; using Microsoft.SemanticKernel.Embeddings; diff --git a/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs b/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs new file mode 100644 index 000000000000..faa5d579d3b8 --- /dev/null +++ b/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs @@ -0,0 +1,201 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Azure.Identity; +using Memory.VectorStoreFixtures; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.Qdrant; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Embeddings; +using Qdrant.Client; + +namespace Memory; + +/// +/// Semantic Kernel provides a generic data model for vector stores that can be used with any +/// schema. The schema still has to be provided in the form of a record definition, but no +/// custom data model is required. +/// +/// The sample shows how to +/// 1. 
Upsert data using the generic data model and retrieve it from the vector store using a custom data model. +/// 2. Upsert data using a custom data model and retrieve it from the vector store using the generic data model. +/// +public class VectorStore_GenericDataModel_Interop(ITestOutputHelper output, VectorStoreQdrantContainerFixture qdrantFixture) : BaseTest(output), IClassFixture +{ + private static readonly JsonSerializerOptions s_indentedSerializerOptions = new() { WriteIndented = true }; + + private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(ulong)), + new VectorStoreRecordDataProperty("Term", typeof(string)), + new VectorStoreRecordDataProperty("Definition", typeof(string)), + new VectorStoreRecordVectorProperty("DefinitionEmbedding", typeof(ReadOnlyMemory)) { Dimensions = 1536 } + } + }; + + [Fact] + public async Task UpsertWithGenericRetrieveWithCustomAsync() + { + // Create an embedding generation service. + var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Initiate the docker container and construct the vector store. + await qdrantFixture.ManualInitializeAsync(); + var vectorStore = new QdrantVectorStore(new QdrantClient("localhost")); + + // Get and create collection if it doesn't exist using the generic data model and record definition that defines the schema. + var genericDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); + await genericDataModelCollection.CreateCollectionIfNotExistsAsync(); + + // Create glossary entries and generate embeddings for them. 
+ var glossaryEntries = CreateGenericGlossaryEntries().ToList(); + var tasks = glossaryEntries.Select(entry => Task.Run(async () => + { + entry.Vectors["DefinitionEmbedding"] = await textEmbeddingGenerationService.GenerateEmbeddingAsync((string)entry.Data["Definition"]!); + })); + await Task.WhenAll(tasks); + + // Upsert the glossary entries into the collection and return their keys. + var upsertedKeysTasks = glossaryEntries.Select(x => genericDataModelCollection.UpsertAsync(x)); + var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); + + // Get the collection using the custom data model. + var customDataModelCollection = vectorStore.GetCollection("skglossary"); + + // Retrieve one of the upserted records from the collection. + var upsertedRecord = await customDataModelCollection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); + + // Write upserted keys and one of the upserted records to the console. + Console.WriteLine($"Upserted keys: {string.Join(", ", upsertedKeys)}"); + Console.WriteLine($"Upserted record: {JsonSerializer.Serialize(upsertedRecord, s_indentedSerializerOptions)}"); + } + + [Fact] + public async Task UpsertWithCustomRetrieveWithGenericAsync() + { + // Create an embedding generation service. + var textEmbeddingGenerationService = new AzureOpenAITextEmbeddingGenerationService( + TestConfiguration.AzureOpenAIEmbeddings.DeploymentName, + TestConfiguration.AzureOpenAIEmbeddings.Endpoint, + new AzureCliCredential()); + + // Initiate the docker container and construct the vector store. + await qdrantFixture.ManualInitializeAsync(); + var vectorStore = new QdrantVectorStore(new QdrantClient("localhost")); + + // Get and create collection if it doesn't exist using the custom data model. + var customDataModelCollection = vectorStore.GetCollection("skglossary"); + await customDataModelCollection.CreateCollectionIfNotExistsAsync(); + + // Create glossary entries and generate embeddings for them. 
+ var glossaryEntries = CreateCustomGlossaryEntries().ToList(); + var tasks = glossaryEntries.Select(entry => Task.Run(async () => + { + entry.DefinitionEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(entry.Definition); + })); + await Task.WhenAll(tasks); + + // Upsert the glossary entries into the collection and return their keys. + var upsertedKeysTasks = glossaryEntries.Select(x => customDataModelCollection.UpsertAsync(x)); + var upsertedKeys = await Task.WhenAll(upsertedKeysTasks); + + // Get the collection using the generic data model. + var genericDataModelCollection = vectorStore.GetCollection>("skglossary", s_vectorStoreRecordDefinition); + + // Retrieve one of the upserted records from the collection. + var upsertedRecord = await genericDataModelCollection.GetAsync(upsertedKeys.First(), new() { IncludeVectors = true }); + + // Write upserted keys and one of the upserted records to the console. + Console.WriteLine($"Upserted keys: {string.Join(", ", upsertedKeys)}"); + Console.WriteLine($"Upserted record: {JsonSerializer.Serialize(upsertedRecord, s_indentedSerializerOptions)}"); + } + + /// + /// Sample model class that represents a glossary entry. + /// + /// + /// Note that each property is decorated with an attribute that specifies how the property should be treated by the vector store. + /// This allows us to create a collection in the vector store and upsert and retrieve instances of this class without any further configuration. + /// + private sealed class Glossary + { + [VectorStoreRecordKey] + public ulong Key { get; set; } + + [VectorStoreRecordData] + public string Term { get; set; } + + [VectorStoreRecordData] + public string Definition { get; set; } + + [VectorStoreRecordVector(1536)] + public ReadOnlyMemory DefinitionEmbedding { get; set; } + } + + /// + /// Create some sample glossary entries using the custom data model. + /// + /// A list of sample glossary entries. 
+ private static IEnumerable CreateCustomGlossaryEntries() + { + yield return new Glossary + { + Key = 1, + Term = "API", + Definition = "Application Programming Interface. A set of rules and specifications that allow software components to communicate and exchange data.", + }; + + yield return new Glossary + { + Key = 2, + Term = "Connectors", + Definition = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc.", + }; + + yield return new Glossary + { + Key = 3, + Term = "RAG", + Definition = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a userโ€™s question (prompt).", + }; + } + + /// + /// Create some sample glossary entries using the generic data model. + /// + /// A list of sample glossary entries. + private static IEnumerable> CreateGenericGlossaryEntries() + { + yield return new VectorStoreGenericDataModel(1) + { + Data = new Dictionary + { + ["Term"] = "API", + ["Definition"] = "Application Programming Interface. 
A set of rules and specifications that allow software components to communicate and exchange data.", + } + }; + + yield return new VectorStoreGenericDataModel(2) + { + Data = new Dictionary + { + ["Term"] = "Connectors", + ["Definition"] = "Connectors allow you to integrate with various services provide AI capabilities, including LLM, AudioToText, TextToAudio, Embedding generation, etc.", + } + }; + + yield return new VectorStoreGenericDataModel(3) + { + Data = new Dictionary + { + ["Term"] = "RAG", + ["Definition"] = "Retrieval Augmented Generation - a term that refers to the process of retrieving additional data to provide as context to an LLM to use when generating a response (completion) to a userโ€™s question (prompt).", + } + }; + } +} diff --git a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs index bd1766a61597..861034b5d336 100644 --- a/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs +++ b/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs @@ -66,7 +66,7 @@ public async Task UsingVectorSearchWithKernelAsync() await pluginStore.SaveAsync(CollectionName, kernel.Plugins); // Enable automatic function calling by default. - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Define kernel arguments with specific request. var kernelArguments = new KernelArguments(executionSettings) { ["Request"] = "Provide latest headlines" }; @@ -91,9 +91,9 @@ public async Task UsingVectorSearchWithKernelAsync() // Invoke the request with plugin selection filter. Console.WriteLine("\nRun with filter:"); - // ToolCallBehavior.AutoInvokeKernelFunctions is used here as well as defined above. 
- // In case there will be related functions found for specific request, the ToolCallBehavior will be updated in filter to - // ToolCallBehavior.EnableFunctions(functions, autoInvoke: true) - this will allow to share only related set of functions with AI. + // FunctionChoiceBehavior.Auto() is used here as well as defined above. + // In case there will be related functions found for specific request, the FunctionChoiceBehavior will be updated in filter to + // FunctionChoiceBehavior.Auto(functions) - this will allow to share only related set of functions with AI. result = await kernel.InvokePromptAsync("{{$Request}}", kernelArguments); Console.WriteLine(result); @@ -140,7 +140,7 @@ public async Task UsingVectorSearchWithChatCompletionAsync() await pluginStore.SaveAsync(CollectionName, kernel.Plugins); // Enable automatic function calling by default. - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get function provider and find best functions for specified prompt. var functionProvider = kernel.GetRequiredService(); @@ -155,11 +155,8 @@ public async Task UsingVectorSearchWithChatCompletionAsync() bestFunctions.ForEach(function => logger.LogInformation("Best function found: {PluginName}-{FunctionName}", function.PluginName, function.Name)); - // Convert selected functions to OpenAI functions. - var openAIFunctions = bestFunctions.Select(function => function.Metadata.ToOpenAIFunction()); - // Share only selected functions with AI. - executionSettings.ToolCallBehavior = ToolCallBehavior.EnableFunctions(openAIFunctions, autoInvoke: true); + executionSettings.FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(bestFunctions); } // Get chat completion service and execute a request. 
@@ -228,11 +225,8 @@ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, F if (promptExecutionSettings is not null && promptExecutionSettings is OpenAIPromptExecutionSettings openAIPromptExecutionSettings) { - // Convert selected functions to OpenAI functions. - var openAIFunctions = functions.Select(function => function.Metadata.ToOpenAIFunction()); - // Share only selected functions with AI. - openAIPromptExecutionSettings.ToolCallBehavior = ToolCallBehavior.EnableFunctions(openAIFunctions, autoInvoke: true); + openAIPromptExecutionSettings.FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(functions); return new() { [PromptExecutionSettings.DefaultServiceId] = openAIPromptExecutionSettings }; } diff --git a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs index 4c287a63a216..d506f9ede325 100644 --- a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs +++ b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs @@ -7,13 +7,15 @@ using System.Security.Cryptography; using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Planning; +using OpenAI.Chat; + +using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent; namespace Planners; @@ -54,7 +56,7 @@ public async Task SideBySideComparisonWithStepwisePlannerAsync() // 1.2 Plan execution using Auto Function Calling. 
var functionCallingChatHistory = new ChatHistory(); var chatCompletionService = kernel.GetRequiredService(); - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; functionCallingChatHistory.AddUserMessage(Goal); @@ -88,7 +90,7 @@ public async Task PlanExecutionOptionsAsync() { var kernel = GetKernel(); - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // If result is the only thing that is needed without generated plan, it's possible to create and execute a plan using Kernel object. var kernelResult = await kernel.InvokePromptAsync(Goal, new(executionSettings)); @@ -119,7 +121,7 @@ public async Task TelemetryForPlanGenerationAndExecutionAsync() { var kernel = GetKernel(enableLogging: true); - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await kernel.InvokePromptAsync(Goal, new(executionSettings)); @@ -161,7 +163,7 @@ public async Task TelemetryForPlanGenerationAndExecutionAsync() public async Task PlanCachingForReusabilityAsync() { var kernel = GetKernel(); - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Wrap chat completion service from Kernel in caching decorator. 
var chatCompletionService = new CachedChatCompletionService(kernel.GetRequiredService()); @@ -204,7 +206,7 @@ public async Task UsingFiltersToControlPlanExecutionAsync() kernel.FunctionInvocationFilters.Add(new PlanExecutionFilter()); - var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; var result = await kernel.InvokePromptAsync(Goal, new(executionSettings)); @@ -328,10 +330,10 @@ private int GetChatHistoryTokens(ChatHistory? chatHistory) { if (message.Metadata is not null && message.Metadata.TryGetValue("Usage", out object? usage) && - usage is CompletionsUsage completionsUsage && + usage is ChatTokenUsage completionsUsage && completionsUsage is not null) { - tokens += completionsUsage.TotalTokens; + tokens += completionsUsage.TotalTokenCount; } } diff --git a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs deleted file mode 100644 index e6c94622ddd6..000000000000 --- a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs +++ /dev/null @@ -1,252 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http.Headers; -using System.Net.Mime; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.OpenApi; -using Resources; - -namespace Plugins; - -[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")] -public class CreatePluginFromOpenAI_AzureKeyVault(ITestOutputHelper output) : BaseTest(output) -{ - private const string SecretName = "Foo"; - private const string SecretValue = "Bar"; - - /// - /// This example demonstrates how to connect an Azure Key Vault plugin to the Semantic Kernel. 
- /// To use this example, there are a few requirements: - /// 1. Register a client application with the Microsoft identity platform. - /// https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app - /// - /// 2. Create an Azure Key Vault - /// https://learn.microsoft.com/en-us/azure/key-vault/general/quick-create-portal - /// - /// 3. Add a permission for Azure Key Vault to your client application - /// https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-configure-app-access-web-apis - /// - /// 4. Set your Key Vault endpoint, client ID, and client secret as user secrets using: - /// dotnet user-secrets set "KeyVault:Endpoint" "your_endpoint" - /// dotnet user-secrets set "KeyVault:ClientId" "your_client_id" - /// dotnet user-secrets set "KeyVault:ClientSecret" "your_secret" - /// - /// 5. Replace your tenant ID with the "TENANT_ID" placeholder in dotnet/samples/Concepts/Resources/22-ai-plugin.json - /// - [Fact(Skip = "Setup credentials")] - public async Task RunAsync() - { - var authenticationProvider = new OpenAIAuthenticationProvider( - new Dictionary>() - { - { - "login.microsoftonline.com", - new Dictionary() - { - { "client_id", TestConfiguration.KeyVault.ClientId }, - { "client_secret", TestConfiguration.KeyVault.ClientSecret }, - { "grant_type", "client_credentials" } - } - } - } - ); - - Kernel kernel = new(); - - var openApiSpec = EmbeddedResource.Read("22-openapi.json"); - using var messageStub = new HttpMessageHandlerStub(openApiSpec); - using var httpClient = new HttpClient(messageStub); - - // Import Open AI Plugin - var openAIManifest = EmbeddedResource.ReadStream("22-ai-plugin.json"); - var plugin = await kernel.ImportPluginFromOpenAIAsync( - "AzureKeyVaultPlugin", - openAIManifest!, - new OpenAIFunctionExecutionParameters - { - AuthCallback = authenticationProvider.AuthenticateRequestAsync, - HttpClient = httpClient, - EnableDynamicPayload = true, - ServerUrlOverride = new 
Uri(TestConfiguration.KeyVault.Endpoint) - }); - - await AddSecretToAzureKeyVaultAsync(kernel, plugin); - await GetSecretFromAzureKeyVaultWithRetryAsync(kernel, plugin); - } - - private async Task AddSecretToAzureKeyVaultAsync(Kernel kernel, KernelPlugin plugin) - { - // Add arguments for required parameters, arguments for optional ones can be skipped. - var arguments = new KernelArguments - { - ["secret-name"] = SecretName, - ["value"] = SecretValue, - ["api-version"] = "7.0", - ["enabled"] = "true", - }; - - // Run - var functionResult = await kernel.InvokeAsync(plugin["SetSecret"], arguments); - - var result = functionResult.GetValue(); - - Console.WriteLine("SetSecret function result: {0}", result?.Content?.ToString()); - } - - private async Task GetSecretFromAzureKeyVaultWithRetryAsync(Kernel kernel, KernelPlugin plugin) - { - // Add arguments for required parameters, arguments for optional ones can be skipped. - var arguments = new KernelArguments - { - ["secret-name"] = SecretName, - ["api-version"] = "7.0" - }; - - // Run - var functionResult = await kernel.InvokeAsync(plugin["GetSecret"], arguments); - - var result = functionResult.GetValue(); - - Console.WriteLine("GetSecret function result: {0}", result?.Content?.ToString()); - } -} - -#region Utility Classes - -/// -/// Provides authentication for HTTP requests to OpenAI using OAuth or verification tokens. -/// -[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")] -internal sealed class OpenAIAuthenticationProvider(Dictionary>? oAuthValues = null, Dictionary? credentials = null) -{ - private readonly Dictionary> _oAuthValues = oAuthValues ?? []; -#pragma warning disable CA1823, RCS1213 // TODO: Use credentials - private readonly Dictionary _credentials = credentials ?? []; -#pragma warning restore CA1823 - - /// - /// Applies the authentication content to the provided HTTP request message. - /// - /// The HTTP request message. 
- /// Name of the plugin - /// The used to authenticate. - /// The cancellation token. - public async Task AuthenticateRequestAsync(HttpRequestMessage request, string pluginName, OpenAIAuthenticationConfig openAIAuthConfig, CancellationToken cancellationToken = default) - { - if (openAIAuthConfig.Type == OpenAIAuthenticationType.None) - { - return; - } - - string scheme = ""; - string credential = ""; - - if (openAIAuthConfig.Type == OpenAIAuthenticationType.OAuth) - { - var domainOAuthValues = this._oAuthValues[openAIAuthConfig.AuthorizationUrl!.Host] - ?? throw new KernelException("No OAuth values found for the provided authorization URL."); - - var values = new Dictionary(domainOAuthValues) { - { "scope", openAIAuthConfig.Scope ?? "" }, - }; - - using HttpContent? requestContent = openAIAuthConfig.AuthorizationContentType switch - { - "application/x-www-form-urlencoded" => new FormUrlEncodedContent(values), - "application/json" => new StringContent(JsonSerializer.Serialize(values), Encoding.UTF8, "application/json"), - _ => throw new KernelException($"Unsupported authorization content type: {openAIAuthConfig.AuthorizationContentType}"), - }; - - // Request the token - using var client = new HttpClient(); - using var authRequest = new HttpRequestMessage(HttpMethod.Post, openAIAuthConfig.AuthorizationUrl) { Content = requestContent }; - var response = await client.SendAsync(authRequest, cancellationToken).ConfigureAwait(false); - - response.EnsureSuccessStatusCode(); - - // Read the token - var responseContent = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - OAuthTokenResponse? tokenResponse; - try - { - tokenResponse = JsonSerializer.Deserialize(responseContent); - } - catch (JsonException) - { - throw new KernelException($"Failed to deserialize token response from {openAIAuthConfig.AuthorizationUrl}."); - } - - // Get the token type and value - scheme = tokenResponse?.TokenType ?? 
throw new KernelException("No token type found in the response."); - credential = tokenResponse?.AccessToken ?? throw new KernelException("No access token found in the response."); - } - else - { - var token = openAIAuthConfig.VerificationTokens?[pluginName] - ?? throw new KernelException("No verification token found for the provided plugin name."); - - scheme = openAIAuthConfig.AuthorizationType.ToString(); - credential = token; - } - - request.Headers.Authorization = new AuthenticationHeaderValue(scheme, credential); - } -} - -/// -/// Represents the authentication section for an OpenAI plugin. -/// -internal sealed class OAuthTokenResponse -{ - /// - /// The type of access token. - /// - [JsonPropertyName("token_type")] - public string TokenType { get; set; } = ""; - - /// - /// The authorization scope. - /// - [JsonPropertyName("access_token")] - public string AccessToken { get; set; } = ""; -} - -internal sealed class HttpMessageHandlerStub : DelegatingHandler -{ - public HttpResponseMessage ResponseToReturn { get; set; } - - public HttpMessageHandlerStub(string responseToReturn) - { - this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(responseToReturn, Encoding.UTF8, MediaTypeNames.Application.Json) - }; - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request.RequestUri!.Scheme.Equals("file", StringComparison.OrdinalIgnoreCase)) - { - return this.ResponseToReturn; - } - - using var httpClient = new HttpClient(); - using var newRequest = new HttpRequestMessage() // construct a new request because the same one cannot be sent twice - { - Content = request.Content, - Method = request.Method, - RequestUri = request.RequestUri, - }; - - foreach (var header in request.Headers) - { - newRequest.Headers.Add(header.Key, header.Value); - } - return await httpClient.SendAsync(newRequest, cancellationToken).ConfigureAwait(false); - } -} - 
-#endregion diff --git a/dotnet/samples/Concepts/Plugins/TransformPlugin.cs b/dotnet/samples/Concepts/Plugins/TransformPlugin.cs index 76da3ef8f531..561802349191 100644 --- a/dotnet/samples/Concepts/Plugins/TransformPlugin.cs +++ b/dotnet/samples/Concepts/Plugins/TransformPlugin.cs @@ -92,7 +92,7 @@ public async Task MissingRequiredInformationAsync() Kernel kernel = kernelBuilder.Build(); // Invoke the kernel with a prompt and allow the AI to automatically invoke functions - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine(await kernel.InvokePromptAsync("What is my favourite color?", new(settings))); // Example response @@ -121,7 +121,7 @@ public async Task CreatePluginWithAlteredParametersAsync() Kernel kernel = kernelBuilder.Build(); // Invoke the kernel with a prompt and allow the AI to automatically invoke functions - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine(await kernel.InvokePromptAsync("What is my favourite color?", new(settings))); Console.WriteLine(await kernel.InvokePromptAsync("What is my favourite cold-blooded animal?", new(settings))); Console.WriteLine(await kernel.InvokePromptAsync("What is my favourite marine animal?", new(settings))); diff --git a/dotnet/samples/Concepts/PromptTemplates/ChatLoopWithPrompt.cs b/dotnet/samples/Concepts/PromptTemplates/ChatLoopWithPrompt.cs new file mode 100644 index 000000000000..89c274fb8b9f --- /dev/null +++ b/dotnet/samples/Concepts/PromptTemplates/ChatLoopWithPrompt.cs @@ -0,0 +1,64 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace PromptTemplates; + +public sealed class ChatLoopWithPrompt(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// This sample demonstrates how to render a chat history to a + /// prompt and use chat completion prompts in a loop. + /// + [Fact] + public async Task ExecuteChatLoopAsPromptAsync() + { + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + var chatHistory = new ChatHistory(); + KernelArguments arguments = new() { { "chatHistory", chatHistory } }; + + string[] userMessages = [ + "What is Seattle?", + "What is the population of Seattle?", + "What is the area of Seattle?", + "What is the weather in Seattle?", + "What is the zip code of Seattle?", + "What is the elevation of Seattle?", + "What is the latitude of Seattle?", + "What is the longitude of Seattle?", + "What is the mayor of Seattle?" + ]; + + foreach (var userMessage in userMessages) + { + chatHistory.AddUserMessage(userMessage); + OutputLastMessage(chatHistory); + + var function = kernel.CreateFunctionFromPrompt( + new() + { + Template = + """ + {{#each (chatHistory)}} + {{Content}} + {{/each}} + """, + TemplateFormat = "handlebars" + }, + new HandlebarsPromptTemplateFactory() + ); + + var response = await kernel.InvokeAsync(function, arguments); + + chatHistory.AddAssistantMessage(response.ToString()); + OutputLastMessage(chatHistory); + } + } +} diff --git a/dotnet/samples/Concepts/PromptTemplates/HandlebarsVisionPrompts.cs b/dotnet/samples/Concepts/PromptTemplates/HandlebarsVisionPrompts.cs new file mode 100644 index 000000000000..195d281da570 --- /dev/null +++ b/dotnet/samples/Concepts/PromptTemplates/HandlebarsVisionPrompts.cs @@ -0,0 +1,51 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.PromptTemplates.Handlebars; + +namespace PromptTemplates; + +// This example shows how to use chat completion handlebars template prompts with base64 encoded images as a parameter. +public class HandlebarsVisionPrompts(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task RunAsync() + { + const string HandlebarsTemplate = """ + You are an AI assistant designed to help with image recognition tasks. + + {{request}} + {{imageData}} + + """; + + var kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + var templateFactory = new HandlebarsPromptTemplateFactory(); + var promptTemplateConfig = new PromptTemplateConfig() + { + Template = HandlebarsTemplate, + TemplateFormat = "handlebars", + Name = "Vision_Chat_Prompt", + }; + var function = kernel.CreateFunctionFromPrompt(promptTemplateConfig, templateFactory); + + var arguments = new KernelArguments(new Dictionary + { + {"request","Describe this image:"}, + {"imageData", "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAAXNSR0IArs4c6QAAACVJREFUKFNj/KTO/J+BCMA4iBUyQX1A0I10VAizCj1oMdyISyEAFoQbHwTcuS8AAAAASUVORK5CYII="} + }); + + var response = await kernel.InvokeAsync(function, arguments); + Console.WriteLine(response); + + /* + Output: + The image is a solid block of bright red color. There are no additional features, shapes, or textures present. 
+ */ + } +} diff --git a/dotnet/samples/Concepts/ChatPrompts/SafeChatPrompts.cs b/dotnet/samples/Concepts/PromptTemplates/SafeChatPrompts.cs similarity index 96% rename from dotnet/samples/Concepts/ChatPrompts/SafeChatPrompts.cs rename to dotnet/samples/Concepts/PromptTemplates/SafeChatPrompts.cs index f7d323d95623..99a07662b4b1 100644 --- a/dotnet/samples/Concepts/ChatPrompts/SafeChatPrompts.cs +++ b/dotnet/samples/Concepts/PromptTemplates/SafeChatPrompts.cs @@ -2,13 +2,14 @@ using Microsoft.SemanticKernel; -namespace ChatPrompts; +namespace PromptTemplates; -public sealed class SafeChatPrompts : BaseTest, IDisposable +public sealed class SafeChatPrompts : BaseTest { private readonly LoggingHandler _handler; private readonly HttpClient _httpClient; private readonly Kernel _kernel; + private bool _isDisposed; public SafeChatPrompts(ITestOutputHelper output) : base(output) { @@ -25,10 +26,19 @@ public SafeChatPrompts(ITestOutputHelper output) : base(output) .Build(); } - public void Dispose() + protected override void Dispose(bool disposing) { - this._handler.Dispose(); - this._httpClient.Dispose(); + if (!this._isDisposed) + { + if (disposing) + { + this._handler.Dispose(); + this._httpClient.Dispose(); + } + + this._isDisposed = true; + } + base.Dispose(disposing); } /// diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md index 26eef28982a7..61845a53291c 100644 --- a/dotnet/samples/Concepts/README.md +++ b/dotnet/samples/Concepts/README.md @@ -2,7 +2,23 @@ Down below you can find the code snippets that demonstrate the usage of many Semantic Kernel features. -## Agents - Different ways of using [`Agents`](./Agents/README.md) +## Running the Tests + +You can run those tests using the IDE or the command line. 
To run the tests using the command line run the following command from the root of Concepts project: + +```text +dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=NameSpace.TestClass.TestMethod" +``` + +Example for `ChatCompletion/OpenAI_ChatCompletion.cs` file, targeting the `ChatPromptSync` test: + +```powershell +dotnet test -l "console;verbosity=detailed" --filter "FullyQualifiedName=ChatCompletion.OpenAI_ChatCompletion.ChatPromptSync" +``` + +## Table of Contents + +### Agents - Different ways of using [`Agents`](./Agents/README.md) - [ComplexChat_NestedShopper](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs) - [Legacy_AgentAuthoring](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs) @@ -17,22 +33,28 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [OpenAIAssistant_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs) - [OpenAIAssistant_Retrieval](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs) -## AudioToText - Different ways of using [`AudioToText`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/AudioToText/IAudioToTextService.cs) services to extract text from audio +### AudioToText - Different ways of using [`AudioToText`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/AudioToText/IAudioToTextService.cs) services to extract text from audio - [OpenAI_AudioToText](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/AudioToText/OpenAI_AudioToText.cs) -## FunctionCalling - Examples on `Function Calling` with function call capable models +### FunctionCalling - Examples on `Function Calling` with 
function call capable models - [Gemini_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/FunctionCalling/Gemini_FunctionCalling.cs) -- [OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs) +- [FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/FunctionCalling/FunctionCalling.cs) - [NexusRaven_HuggingFaceTextGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/FunctionCalling/NexusRaven_FunctionCalling.cs) +- [MultipleFunctionsVsParameters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/FunctionCalling/MultipleFunctionsVsParameters.cs) -## Caching - Examples of caching implementations +### Caching - Examples of caching implementations - [SemanticCachingWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Caching/SemanticCachingWithFilters.cs) -## ChatCompletion - Examples using [`ChatCompletion`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) messaging capable service with models +### ChatCompletion - Examples using [`ChatCompletion`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) messaging capable service with models +- [AzureAIInference_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletion.cs) +- [AzureAIInference_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureAIInference_ChatCompletionStreaming.cs) +- 
[AzureOpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs) +- [AzureOpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs) +- [AzureOpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_CustomClient.cs) - [AzureOpenAIWithData_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs) - [ChatHistoryAuthorName](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs) - [ChatHistorySerialization](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/ChatHistorySerialization.cs) @@ -44,26 +66,28 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [Google_GeminiGetModelResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiGetModelResult.cs) - [Google_GeminiVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs) - [OpenAI_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs) -- [OpenAI_ChatCompletionMultipleChoices](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs) - [OpenAI_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs) -- 
[OpenAI_ChatCompletionStreamingMultipleChoices](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs) - [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs) -- [OpenAI_CustomAzureOpenAIClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs) +- [OpenAI_CustomClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomClient.cs) - [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs) - [OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs) - [OpenAI_ReasonedFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ReasonedFunctionCalling.cs) - [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs) - [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs) - [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs) +- [Onnx_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletion.cs) +- [Onnx_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Onnx_ChatCompletionStreaming.cs) +- 
[Ollama_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs) +- [Ollama_ChatCompletionStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs) -## DependencyInjection - Examples on using `DI Container` +### DependencyInjection - Examples on using `DI Container` - [HttpClient_Registration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/DependencyInjection/HttpClient_Registration.cs) - [HttpClient_Resiliency](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/DependencyInjection/HttpClient_Resiliency.cs) - [Kernel_Building](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/DependencyInjection/Kernel_Building.cs) - [Kernel_Injecting](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs) -## Filtering - Different ways of filtering +### Filtering - Different ways of filtering - [AutoFunctionInvocationFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs) - [FunctionInvocationFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/FunctionInvocationFiltering.cs) @@ -73,7 +97,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [PIIDetectionWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PIIDetectionWithFilters.cs) - [TelemetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs) -## Functions - Invoking [`Method`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs) or 
[`Prompt`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs) functions with [`Kernel`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Kernel.cs) +### Functions - Invoking [`Method`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs) or [`Prompt`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs) functions with [`Kernel`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Kernel.cs) - [Arguments](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/Arguments.cs) - [FunctionResult_Metadata](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/FunctionResult_Metadata.cs) @@ -85,17 +109,19 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [PromptFunctions_Inline](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/PromptFunctions_Inline.cs) - [PromptFunctions_MultipleArguments](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/PromptFunctions_MultipleArguments.cs) -## ImageToText - Using [`ImageToText`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ImageToText/IImageToTextService.cs) services to describe images +### ImageToText - Using [`ImageToText`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ImageToText/IImageToTextService.cs) services to describe images - [HuggingFace_ImageToText](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ImageToText/HuggingFace_ImageToText.cs) -## LocalModels - Running models locally +### LocalModels - Running models 
locally - [HuggingFace_ChatCompletionWithTGI](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/HuggingFace_ChatCompletionWithTGI.cs) - [MultipleProviders_ChatCompletion](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/LocalModels/MultipleProviders_ChatCompletion.cs) -## Memory - Using AI [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) concepts +### Memory - Using AI [`Memory`](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel.Abstractions/Memory) concepts +- [Ollama_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/Ollama_EmbeddingGeneration.cs) +- [Onnx_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/Onnx_EmbeddingGeneration.cs) - [HuggingFace_EmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/HuggingFace_EmbeddingGeneration.cs) - [MemoryStore_CustomReadOnly](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/MemoryStore_CustomReadOnly.cs) - [SemanticTextMemory_Building](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/SemanticTextMemory_Building.cs) @@ -107,23 +133,23 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [VectorStore_DataIngestion_Simple: A simple example of how to do data ingestion into a vector store when getting started.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs) - [VectorStore_DataIngestion_MultiStore: An example of data ingestion that uses the same code to ingest into multiple vector stores types.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs) - 
[VectorStore_DataIngestion_CustomMapper: An example that shows how to use a custom mapper for when your data model and storage model doesn't match.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs) +- [VectorStore_GenericDataModel_Interop: An example that shows how you can use the built-in, generic data model from Semantic Kernel to read and write to a Vector Store.](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/VectorStore_GenericDataModel_Interop.cs) -## Optimization - Examples of different cost and performance optimization techniques +### Optimization - Examples of different cost and performance optimization techniques - [FrugalGPTWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Optimization/FrugalGPTWithFilters.cs) - [PluginSelectionWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Optimization/PluginSelectionWithFilters.cs) -## Planners - Examples on using `Planners` +### Planners - Examples on using `Planners` - [AutoFunctionCallingPlanning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs) - [FunctionCallStepwisePlanning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Planners/FunctionCallStepwisePlanning.cs) - [HandlebarsPlanning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Planners/HandlebarsPlanning.cs) -## Plugins - Different ways of creating and using [`Plugins`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs) +### Plugins - Different ways of creating and using [`Plugins`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Functions/KernelPlugin.cs) - 
[ApiManifestBasedPlugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ApiManifestBasedPlugins.cs) - [ConversationSummaryPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ConversationSummaryPlugin.cs) -- [CreatePluginFromOpenAI_AzureKeyVault](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs)(Deprecated) - [CreatePluginFromOpenApiSpec_Github](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs) - [CreatePluginFromOpenApiSpec_Jira](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Jira.cs) - [CreatePluginFromOpenApiSpec_Klarna](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs) @@ -134,7 +160,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [ImportPluginFromGrpc](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ImportPluginFromGrpc.cs) - [TransformPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/TransformPlugin.cs) -## PromptTemplates - Using [`Templates`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs) with parametrization for `Prompt` rendering +### PromptTemplates - Using [`Templates`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs) with parametrization for `Prompt` rendering - [ChatCompletionPrompts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/ChatCompletionPrompts.cs) - 
[ChatWithPrompts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/ChatWithPrompts.cs) @@ -142,29 +168,32 @@ Down below you can find the code snippets that demonstrate the usage of many Sem - [MultiplePromptTemplates](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/MultiplePromptTemplates.cs) - [PromptFunctionsWithChatGPT](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/PromptFunctionsWithChatGPT.cs) - [TemplateLanguage](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/TemplateLanguage.cs) -- [PromptyFunction](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptYemplates/PromptyFunction.cs) +- [PromptyFunction](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/PromptyFunction.cs) +- [HandlebarsVisionPrompts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/HandlebarsVisionPrompts.cs) +- [SafeChatPrompts](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/SafeChatPrompts.cs) +- [ChatLoopWithPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/PromptTemplates/ChatLoopWithPrompt.cs) -## RAG - Retrieval-Augmented Generation +### RAG - Retrieval-Augmented Generation - [WithFunctionCallingStepwisePlanner](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/RAG/WithFunctionCallingStepwisePlanner.cs) - [WithPlugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/RAG/WithPlugins.cs) -## Search - Search services information +### Search - Search services information - [BingAndGooglePlugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Search/BingAndGooglePlugins.cs) - 
[MyAzureAISearchPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Search/MyAzureAISearchPlugin.cs) - [WebSearchQueriesPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Search/WebSearchQueriesPlugin.cs) -## TextGeneration - [`TextGeneration`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/ITextGenerationService.cs) capable service with models +### TextGeneration - [`TextGeneration`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/ITextGenerationService.cs) capable service with models - [Custom_TextGenerationService](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/TextGeneration/Custom_TextGenerationService.cs) - [HuggingFace_TextGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/TextGeneration/HuggingFace_TextGeneration.cs) - [OpenAI_TextGenerationStreaming](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs) -## TextToAudio - Using [`TextToAudio`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextToAudio/ITextToAudioService.cs) services to generate audio +### TextToAudio - Using [`TextToAudio`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextToAudio/ITextToAudioService.cs) services to generate audio - [OpenAI_TextToAudio](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/TextToAudio/OpenAI_TextToAudio.cs) -## TextToImage - Using [`TextToImage`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs) services to generate images +### TextToImage - Using 
[`TextToImage`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs) services to generate images - [OpenAI_TextToImage](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageDalle3.cs) diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs index 7111e873cf4c..c383ea9025f1 100644 --- a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs +++ b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs @@ -7,12 +7,6 @@ namespace Plugins; public sealed class LegacyMenuPlugin { - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - /// /// Returns a mock item menu. /// @@ -20,8 +14,6 @@ public sealed class LegacyMenuPlugin [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] public string[] GetSpecials(KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(GetSpecials)); - return [ "Special Soup: Clam Chowder", @@ -39,8 +31,6 @@ public string GetItemPrice( string menuItem, KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(GetItemPrice)); - return "$9.99"; } @@ -55,21 +45,6 @@ public bool IsItem86d( int count, KernelArguments? arguments) { - CaptureCorrelationId(arguments, nameof(IsItem86d)); - return count < 3; } - - private void CaptureCorrelationId(KernelArguments? arguments, string scope) - { - if (arguments?.TryGetValue(CorrelationIdArgument, out object? correlationId) ?? false) - { - string? 
correlationText = correlationId?.ToString(); - - if (!string.IsNullOrWhiteSpace(correlationText)) - { - this._correlationIds.Add($"{scope}:{correlationText}"); - } - } - } } diff --git a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs deleted file mode 100644 index be82177eda5d..000000000000 --- a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs +++ /dev/null @@ -1,34 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.ComponentModel; -using Microsoft.SemanticKernel; - -namespace Plugins; - -public sealed class MenuPlugin -{ - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - - [KernelFunction, Description("Provides a list of specials from the menu.")] - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } - - [KernelFunction, Description("Provides the price of the requested menu item.")] - public string GetItemPrice( - [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } -} diff --git a/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs new file mode 100644 index 000000000000..719d5eb9f951 --- /dev/null +++ b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGeneration.cs @@ -0,0 +1,76 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextGeneration; +using xRetry; + +#pragma warning disable format // Format item can be simplified +#pragma warning disable CA1861 // Avoid constant arrays as arguments + +namespace TextGeneration; + +// The following example shows how to use Semantic Kernel with Ollama Text Generation API. +public class Ollama_TextGeneration(ITestOutputHelper helper) : BaseTest(helper) +{ + [Fact] + public async Task KernelPromptAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("\n======== Ollama Text Generation example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:"); + + var result = await kernel.InvokeAsync(questionAnswerFunction, new() { ["input"] = "What is New York?" 
}); + + Console.WriteLine(result.GetValue()); + } + + [Fact] + public async Task ServicePromptAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + Console.WriteLine("\n======== Ollama Text Generation example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var service = kernel.GetRequiredService(); + var result = await service.GetTextContentAsync("Question: What is New York?; Answer:"); + + Console.WriteLine(result); + } + + [RetryFact(typeof(HttpOperationException))] + public async Task RunStreamingExampleAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + string model = TestConfiguration.Ollama.ModelId; + + Console.WriteLine($"\n======== HuggingFace {model} streaming example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: TestConfiguration.Ollama.ModelId) + .Build(); + + var questionAnswerFunction = kernel.CreateFunctionFromPrompt("Question: {{$input}}; Answer:"); + + await foreach (string text in kernel.InvokePromptStreamingAsync("Question: {{$input}}; Answer:", new() { ["input"] = "What is New York?" })) + { + Console.Write(text); + } + } +} diff --git a/dotnet/samples/Concepts/TextGeneration/Ollama_TextGenerationStreaming.cs b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGenerationStreaming.cs new file mode 100644 index 000000000000..35e0c31074f4 --- /dev/null +++ b/dotnet/samples/Concepts/TextGeneration/Ollama_TextGenerationStreaming.cs @@ -0,0 +1,57 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextGeneration; + +#pragma warning disable format // Format item can be simplified +#pragma warning disable CA1861 // Avoid constant arrays as arguments + +namespace TextGeneration; + +// The following example shows how to use Semantic Kernel with Ollama Text Generation API. +public class Ollama_TextGenerationStreaming(ITestOutputHelper helper) : BaseTest(helper) +{ + [Fact] + public async Task RunKernelStreamingExampleAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + string model = TestConfiguration.Ollama.ModelId; + + Console.WriteLine($"\n======== Ollama {model} streaming example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: model) + .Build(); + + await foreach (string text in kernel.InvokePromptStreamingAsync("Question: {{$input}}; Answer:", new() { ["input"] = "What is New York?" })) + { + Console.Write(text); + } + } + + [Fact] + public async Task RunServiceStreamingExampleAsync() + { + Assert.NotNull(TestConfiguration.Ollama.ModelId); + + string model = TestConfiguration.Ollama.ModelId; + + Console.WriteLine($"\n======== Ollama {model} streaming example ========\n"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOllamaTextGeneration( + endpoint: new Uri(TestConfiguration.Ollama.Endpoint), + modelId: model) + .Build(); + + var service = kernel.GetRequiredService(); + + await foreach (var content in service.GetStreamingTextContentsAsync("Question: What is New York?; Answer:")) + { + Console.Write(content); + } + } +} diff --git a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs index 44b7806a1355..bb906bb6d05c 100644 --- a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs +++ b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs 
@@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextGeneration; @@ -22,11 +23,11 @@ public Task AzureOpenAITextGenerationStreamAsync() { Console.WriteLine("======== Azure OpenAI - Text Generation - Raw Streaming ========"); - var textGeneration = new AzureOpenAITextGenerationService( - deploymentName: TestConfiguration.AzureOpenAI.DeploymentName, + var textGeneration = new AzureOpenAIChatCompletionService( + deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName, endpoint: TestConfiguration.AzureOpenAI.Endpoint, apiKey: TestConfiguration.AzureOpenAI.ApiKey, - modelId: TestConfiguration.AzureOpenAI.ModelId); + modelId: TestConfiguration.AzureOpenAI.ChatModelId); return this.TextGenerationStreamAsync(textGeneration); } @@ -36,7 +37,7 @@ public Task OpenAITextGenerationStreamAsync() { Console.WriteLine("======== Open AI - Text Generation - Raw Streaming ========"); - var textGeneration = new OpenAITextGenerationService("gpt-3.5-turbo-instruct", TestConfiguration.OpenAI.ApiKey); + var textGeneration = new OpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); return this.TextGenerationStreamAsync(textGeneration); } diff --git a/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImage.cs b/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImage.cs new file mode 100644 index 000000000000..96dbb53edb81 --- /dev/null +++ b/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImage.cs @@ -0,0 +1,184 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextToImage; + +namespace TextToImage; + +// The following example shows how to use Semantic Kernel with OpenAI DALL-E 2 to create images +public class OpenAI_TextToImage(ITestOutputHelper output) : BaseTest(output) +{ + [Fact] + public async Task OpenAIDallE2Async() + { + Console.WriteLine("======== OpenAI DALL-E 2 Text To Image ========"); + + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAITextToImage(TestConfiguration.OpenAI.ApiKey) // Add your text to image service + .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) // Add your chat completion service + .Build(); + + ITextToImageService dallE = kernel.GetRequiredService(); + + var imageDescription = "A cute baby sea otter"; + var images = await dallE.GetImageContentsAsync(imageDescription, new OpenAITextToImageExecutionSettings { Size = (256, 256) }); + var image = images[0].Uri!.ToString(); + Console.WriteLine(imageDescription); + Console.WriteLine("Image URL: " + image); + + /* Output: + + A cute baby sea otter + Image URL: https://oaidalleapiprodscus.blob.core.windows.net/private/.... + + */ + + Console.WriteLine("======== Chat with images ========"); + + var chatGPT = kernel.GetRequiredService(); + var chatHistory = new ChatHistory( + "You're chatting with a user. Instead of replying directly to the user" + + " provide the description of an image that expresses what you want to say." + + " The user won't see your message, they will see only the image. 
The system " + + " generates an image using your description, so it's important you describe the image with details."); + + var msg = "Hi, I'm from Tokyo, where are you from?"; + chatHistory.AddUserMessage(msg); + Console.WriteLine("User: " + msg); + + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + images = await dallE.GetImageContentsAsync(reply.Content!, new OpenAITextToImageExecutionSettings { Size = (256, 256) }); + image = images[0].Uri!.ToString(); + Console.WriteLine("Bot: " + image); + Console.WriteLine("Img description: " + reply); + + msg = "Oh, wow. Not sure where that is, could you provide more details?"; + chatHistory.AddUserMessage(msg); + Console.WriteLine("User: " + msg); + + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + images = await dallE.GetImageContentsAsync(reply.Content!, new OpenAITextToImageExecutionSettings { Size = (256, 256) }); + image = images[0].Uri!.ToString(); + Console.WriteLine("Bot: " + image); + Console.WriteLine("Img description: " + reply); + + /* Output: + + User: Hi, I'm from Tokyo, where are you from? + Bot: https://oaidalleapiprodscus.blob.core.windows.net/private/... + Img description: [An image of a globe with a pin dropped on a location in the middle of the ocean] + + User: Oh, wow. Not sure where that is, could you provide more details? + Bot: https://oaidalleapiprodscus.blob.core.windows.net/private/... 
+ Img description: [An image of a map zooming in on the pin location, revealing a small island with a palm tree on it] + + */ + } + + [Fact] + public async Task SimpleTextToImageExampleAsync() + { + var builder = Kernel.CreateBuilder() + .AddAzureOpenAITextToImage( // Add your text to image service + deploymentName: TestConfiguration.AzureOpenAI.ImageDeploymentName, + endpoint: TestConfiguration.AzureOpenAI.ImageEndpoint, + apiKey: TestConfiguration.AzureOpenAI.ImageApiKey, + modelId: TestConfiguration.AzureOpenAI.ImageModelId); + + var kernel = builder.Build(); + var service = kernel.GetRequiredService(); + + var generatedImages = await service.GetImageContentsAsync(new TextContent("A cute baby sea otter"), new OpenAITextToImageExecutionSettings { Size = (Width: 1792, Height: 1024) }); + + this.Output.WriteLine(generatedImages[0].Uri!.ToString()); + } + + [Fact] + public async Task OpenAIDallE3Async() + { + Console.WriteLine("======== OpenAI DALL-E 3 Text To Image ========"); + + var builder = Kernel.CreateBuilder() + .AddOpenAITextToImage( // Add your text to image service + modelId: "dall-e-3", + apiKey: TestConfiguration.OpenAI.ApiKey) //DALL-E 3 is only supported in this version + .AddOpenAIChatCompletion( // Add your chat completion service + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey); + + builder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry 5 times + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.MaxRetryAttempts = 5; + o.TotalRequestTimeout.Timeout = TimeSpan.FromSeconds(120); + }); + }); + + var kernel = builder.Build(); + + ITextToImageService dallE = kernel.GetRequiredService(); + var imageDescription = "A cute baby sea otter"; + var images = await dallE.GetImageContentsAsync(imageDescription, new OpenAITextToImageExecutionSettings { Size = (1024, 1024) }); + + Console.WriteLine(imageDescription); + Console.WriteLine("Image URL: " + 
images[0].Uri!); + + /* Output: + + A cute baby sea otter + Image URL: https://oaidalleapiprodscus.blob.core.windows.net/private/org-/.... + + */ + + Console.WriteLine("======== Chat with images ========"); + + var chatGPT = kernel.GetRequiredService(); + var chatHistory = new ChatHistory( + "You're chatting with a user. Instead of replying directly to the user" + + " provide the description of an image that expresses what you want to say." + + " The user won't see your message, they will see only the image. The system " + + " generates an image using your description, so it's important you describe the image with details."); + + var msg = "Hi, I'm from Tokyo, where are you from?"; + chatHistory.AddUserMessage(msg); + Console.WriteLine("User: " + msg); + + var reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + images = await dallE.GetImageContentsAsync(reply.Content!, new OpenAITextToImageExecutionSettings { Size = (1024, 1024) }); + var image = images[0].Uri!.ToString(); + Console.WriteLine("Bot: " + image); + Console.WriteLine("Img description: " + reply); + + msg = "Oh, wow. Not sure where that is, could you provide more details?"; + chatHistory.AddUserMessage(msg); + Console.WriteLine("User: " + msg); + + reply = await chatGPT.GetChatMessageContentAsync(chatHistory); + chatHistory.Add(reply); + images = await dallE.GetImageContentsAsync(reply.Content!, new OpenAITextToImageExecutionSettings { Size = (1024, 1024) }); + image = images[0].Uri!.ToString(); + Console.WriteLine("Bot: " + image); + Console.WriteLine("Img description: " + reply); + + /* Output: + + User: Hi, I'm from Tokyo, where are you from? + Bot: https://dalleproduse.blob.core.windows.net/private/images/...... + Img description: [An image of a globe with a pin dropped on a location in the middle of the ocean] + + User: Oh, wow. Not sure where that is, could you provide more details? + Bot: https://dalleproduse.blob.core.windows.net/private/images/...... 
+ Img description: [An image of a map zooming in on the pin location, revealing a small island with a palm tree on it] + + */ + } +} diff --git a/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageDalle3.cs b/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageLegacy.cs similarity index 96% rename from dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageDalle3.cs rename to dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageLegacy.cs index 32e78c9382a8..c2f208dd8334 100644 --- a/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageDalle3.cs +++ b/dotnet/samples/Concepts/TextToImage/OpenAI_TextToImageLegacy.cs @@ -8,8 +8,10 @@ namespace TextToImage; -// The following example shows how to use Semantic Kernel with OpenAI DALL-E 2 to create images -public class OpenAI_TextToImageDalle3(ITestOutputHelper output) : BaseTest(output) +/// +/// The following example shows how you can still use the previous "ITextToImageService.GenerateImageAsync" API to generate images. +/// +public class OpenAI_TextToImageLegacy(ITestOutputHelper output) : BaseTest(output) { [Fact] public async Task OpenAIDallEAsync() diff --git a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj index fb5862e3270a..4ce04e354cc8 100644 --- a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj +++ b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj @@ -2,10 +2,11 @@ Exe - net8.0;netstandard2.0 + net8.0 enable enable 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + $(NoWarn);CA2249;CS0612 @@ -14,6 +15,9 @@ + + + diff --git a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs index ff2767a289c8..4d324bacdcd1 100644 --- a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs +++ b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs @@ -11,7 +11,7 @@ namespace AIModelRouter; /// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for 
/// deciding which service to use based on the user's input or any other criteria. /// -public class CustomRouter() +internal sealed class CustomRouter() { /// /// Returns the best service id to use based on the user's input. @@ -21,7 +21,7 @@ public class CustomRouter() /// User's input prompt /// List of service ids to choose from in order of importance, defaulting to the first /// Service id. - public string FindService(string lookupPrompt, IReadOnlyList serviceIds) + internal string FindService(string lookupPrompt, IReadOnlyList serviceIds) { // The order matters, if the keyword is not found, the first one is used. foreach (var serviceId in serviceIds) diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs index 5bafa4934883..9d3631dbcb90 100644 --- a/dotnet/samples/Demos/AIModelRouter/Program.cs +++ b/dotnet/samples/Demos/AIModelRouter/Program.cs @@ -6,11 +6,11 @@ #pragma warning disable SKEXP0001 #pragma warning disable SKEXP0010 -#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf' +#pragma warning disable SKEXP0070 namespace AIModelRouter; -internal sealed partial class Program +internal sealed class Program { private static async Task Main(string[] args) { @@ -21,20 +21,69 @@ private static async Task Main(string[] args) ServiceCollection services = new(); // Adding multiple connectors targeting different providers / models. - services.AddKernel() /* LMStudio model is selected in server side. */ - .AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "N/A", endpoint: new Uri("http://localhost:1234"), apiKey: null) - .AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null) - .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!) 
+ services + .AddKernel() + .AddOpenAIChatCompletion( + serviceId: "lmstudio", + modelId: "N/A", // LMStudio model is pre defined in the UI List box. + endpoint: new Uri(config["LMStudio:Endpoint"] ?? "http://localhost:1234"), + apiKey: null); - // Adding a custom filter to capture router selected service id - .Services.AddSingleton(new SelectedServiceFilter()); + Console.ForegroundColor = ConsoleColor.DarkCyan; + Console.WriteLine("======== AI Services Added ========"); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("โ€ข LMStudio - Use \"lmstudio\" in the prompt."); + + if (config["Ollama:ModelId"] is not null) + { + services.AddOllamaChatCompletion( + serviceId: "ollama", + modelId: config["Ollama:ModelId"]!, + endpoint: new Uri(config["Ollama:Endpoint"] ?? "http://localhost:11434")); + + Console.WriteLine("โ€ข Ollama - Use \"ollama\" in the prompt."); + } + + if (config["OpenAI:ApiKey"] is not null) + { + services.AddOpenAIChatCompletion( + serviceId: "openai", + modelId: config["OpenAI:ModelId"] ?? 
"gpt-4o", + apiKey: config["OpenAI:ApiKey"]!); + + Console.WriteLine("โ€ข OpenAI Added - Use \"openai\" in the prompt."); + } + + if (config["Onnx:ModelPath"] is not null) + { + services.AddOnnxRuntimeGenAIChatCompletion( + serviceId: "onnx", + modelId: "phi-3", + modelPath: config["Onnx:ModelPath"]!); + + Console.WriteLine("โ€ข ONNX Added - Use \"onnx\" in the prompt."); + } + + if (config["AzureAIInference:Endpoint"] is not null) + { + services.AddAzureAIInferenceChatCompletion( + serviceId: "azureai", + endpoint: new Uri(config["AzureAIInference:Endpoint"]!), + apiKey: config["AzureAIInference:ApiKey"]); + + Console.WriteLine("โ€ข Azure AI Inference Added - Use \"azureai\" in the prompt."); + } + + // Adding a custom filter to capture router selected service id + services.AddSingleton(new SelectedServiceFilter()); var kernel = services.BuildServiceProvider().GetRequiredService(); var router = new CustomRouter(); + Console.ForegroundColor = ConsoleColor.White; while (true) { - Console.Write("\n\nUser > "); + Console.Write("\nUser > "); var userMessage = Console.ReadLine(); // Exit application if the user enters an empty message @@ -43,7 +92,7 @@ private static async Task Main(string[] args) // Find the best service to use based on the user's input KernelArguments arguments = new(new PromptExecutionSettings() { - ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"]) + ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai", "onnx", "azureai"]) }); // Invoke the prompt and print the response @@ -51,6 +100,7 @@ private static async Task Main(string[] args) { Console.Write(chatChunk); } + Console.WriteLine(); } } } diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md index 92ac37e7c81e..afb061ced3c2 100644 --- a/dotnet/samples/Demos/AIModelRouter/README.md +++ b/dotnet/samples/Demos/AIModelRouter/README.md @@ -21,8 +21,13 @@ The sample can be configured by using the 
command line with .NET [Secret Manager ### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) ```powershell -# OpenAI (Not required if using Azure OpenAI) dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... " +dotnet user-secrets set "OpenAI:ModelId" ".. Openai model .. " (default: gpt-4o) +dotnet user-secrets set "Ollama:ModelId" ".. Ollama model id .. " +dotnet user-secrets set "Ollama:Endpoint" ".. Ollama endpoint .. " (default: http://localhost:11434) +dotnet user-secrets set "LMStudio:Endpoint" ".. LM Studio endpoint .. " (default: http://localhost:1234) +dotnet user-secrets set "Onnx:ModelId" ".. Onnx model id" +dotnet user-secrets set "Onnx:ModelPath" ".. your Onnx model folder path .." ``` ## Running the sample diff --git a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs index 9824d57ebd55..0c5334fc58a0 100644 --- a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs +++ b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs @@ -11,7 +11,7 @@ namespace AIModelRouter; /// /// Using a filter to log the service being used for the prompt. 
/// -public class SelectedServiceFilter : IPromptRenderFilter +internal sealed class SelectedServiceFilter : IPromptRenderFilter { /// public Task OnPromptRenderAsync(PromptRenderContext context, Func next) diff --git a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj index 2f744127417e..863532cb474e 100644 --- a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj +++ b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj @@ -6,7 +6,7 @@ enable enable - $(NoWarn);CA2007;VSTHRD111 + $(NoWarn);CA2007;VSTHRD111;SKEXP0001 c478d0b2-7145-4d1a-9600-3130c04085cd @@ -22,7 +22,7 @@ - + diff --git a/dotnet/samples/Demos/BookingRestaurant/Program.cs b/dotnet/samples/Demos/BookingRestaurant/Program.cs index 253785ce722c..750c687e4280 100644 --- a/dotnet/samples/Demos/BookingRestaurant/Program.cs +++ b/dotnet/samples/Demos/BookingRestaurant/Program.cs @@ -106,7 +106,7 @@ // Enable auto function calling var executionSettings = new OpenAIPromptExecutionSettings { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get the result from the AI diff --git a/dotnet/samples/Demos/CodeInterpreterPlugin/CodeInterpreterPlugin.csproj b/dotnet/samples/Demos/CodeInterpreterPlugin/CodeInterpreterPlugin.csproj index 8df5f889470e..53fc3a783be9 100644 --- a/dotnet/samples/Demos/CodeInterpreterPlugin/CodeInterpreterPlugin.csproj +++ b/dotnet/samples/Demos/CodeInterpreterPlugin/CodeInterpreterPlugin.csproj @@ -5,6 +5,7 @@ net8.0 enable enable + $(NoWarn);SKEXP0001 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Demos/CodeInterpreterPlugin/Program.cs b/dotnet/samples/Demos/CodeInterpreterPlugin/Program.cs index 636fa34975b9..48cc93f35528 100644 --- a/dotnet/samples/Demos/CodeInterpreterPlugin/Program.cs +++ b/dotnet/samples/Demos/CodeInterpreterPlugin/Program.cs @@ -97,7 +97,7 @@ async Task TokenProvider() 
fullAssistantContent.Clear(); await foreach (var content in chatCompletion.GetStreamingChatMessageContentsAsync( chatHistory, - new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, + new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }, kernel) .ConfigureAwait(false)) { diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj index 3c6ca9a15470..805e10f7d5ac 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj +++ b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj @@ -28,7 +28,7 @@ - + diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/README.md b/dotnet/samples/Demos/CreateChatGptPlugin/README.md index 3394ad2b1693..e9e035272d3d 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/README.md +++ b/dotnet/samples/Demos/CreateChatGptPlugin/README.md @@ -16,17 +16,16 @@ The sample can be configured by using the command line with .NET [Secret Manager This sample has been tested with the following models: -| Service | Model type | Model | Model version | Supported | -| ------------ | --------------- | ---------------- | ------------: | --------- | -| OpenAI | Text Completion | text-davinci-003 | 1 | โŒ | -| OpenAI | Chat Completion | gpt-3.5-turbo | 1 | โŒ | -| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 | โŒ | -| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0613 | โœ… | -| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 1106 | โœ… | -| OpenAI | Chat Completion | gpt-4 | 1 | โŒ | -| OpenAI | Chat Completion | gpt-4 | 0314 | โŒ | -| Azure OpenAI | Chat Completion | gpt-4 | 0613 | โœ… | -| Azure OpenAI | Chat Completion | gpt-4 | 1106 | โœ… | +| Service | Model | Model version | Supported | +| 
------------ | ---------------- | ------------: | --------- | +| OpenAI | gpt-3.5-turbo | 1 | โŒ | +| OpenAI | gpt-3.5-turbo | 0301 | โŒ | +| Azure OpenAI | gpt-3.5-turbo | 0613 | โœ… | +| Azure OpenAI | gpt-3.5-turbo | 1106 | โœ… | +| OpenAI | gpt-4 | 1 | โŒ | +| OpenAI | gpt-4 | 0314 | โŒ | +| Azure OpenAI | gpt-4 | 0613 | โœ… | +| Azure OpenAI | gpt-4 | 1106 | โœ… | This sample uses function calling, so it only works on models newer than 0613. @@ -39,7 +38,6 @@ cd 14-Create-ChatGPT-Plugin/Solution dotnet user-secrets set "Global:LlmService" "OpenAI" -dotnet user-secrets set "OpenAI:ModelType" "chat-completion" dotnet user-secrets set "OpenAI:ChatCompletionModelId" "gpt-4" dotnet user-secrets set "OpenAI:ApiKey" "... your OpenAI key ..." dotnet user-secrets set "OpenAI:OrgId" "... your ord ID ..." @@ -52,7 +50,6 @@ cd 14-Create-ChatGPT-Plugin/Solution dotnet user-secrets set "Global:LlmService" "AzureOpenAI" -dotnet user-secrets set "AzureOpenAI:DeploymentType" "chat-completion" dotnet user-secrets set "AzureOpenAI:ChatCompletionDeploymentName" "gpt-35-turbo" dotnet user-secrets set "AzureOpenAI:ChatCompletionModelId" "gpt-3.5-turbo-0613" dotnet user-secrets set "AzureOpenAI:Endpoint" "... your Azure OpenAI endpoint ..." 
diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj index a81e39b415e4..6882606cd2f7 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj +++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj @@ -8,7 +8,7 @@ enable 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 false - $(NoWarn);SKEXP0040 + $(NoWarn);SKEXP0040;SKEXP0001 @@ -16,8 +16,8 @@ + - @@ -26,4 +26,8 @@ + + + + diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/Program.cs b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/Program.cs index 3ff433d6cd8e..4aa53eed32cf 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/Program.cs +++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/Program.cs @@ -34,7 +34,7 @@ // Enable auto function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get the response from the AI diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs index 3ba36e2bbdb8..a823ac316880 100644 --- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs +++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs @@ -14,47 +14,24 @@ internal static IKernelBuilder WithCompletionService(this IKernelBuilder kernelB switch (Env.Var("Global:LlmService")!) 
{ case "AzureOpenAI": - if (Env.Var("AzureOpenAI:DeploymentType") == "text-completion") - { - kernelBuilder.Services.AddAzureOpenAITextGeneration( - deploymentName: Env.Var("AzureOpenAI:TextCompletionDeploymentName")!, - modelId: Env.Var("AzureOpenAI:TextCompletionModelId"), - endpoint: Env.Var("AzureOpenAI:Endpoint")!, - apiKey: Env.Var("AzureOpenAI:ApiKey")! - ); - } - else if (Env.Var("AzureOpenAI:DeploymentType") == "chat-completion") - { - kernelBuilder.Services.AddAzureOpenAIChatCompletion( - deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!, - modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"), - endpoint: Env.Var("AzureOpenAI:Endpoint")!, - apiKey: Env.Var("AzureOpenAI:ApiKey")! - ); - } + kernelBuilder.Services.AddAzureOpenAIChatCompletion( + deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!, + modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"), + endpoint: Env.Var("AzureOpenAI:Endpoint")!, + apiKey: Env.Var("AzureOpenAI:ApiKey")! + ); break; case "OpenAI": - if (Env.Var("OpenAI:ModelType") == "text-completion") - { - kernelBuilder.Services.AddOpenAITextGeneration( - modelId: Env.Var("OpenAI:TextCompletionModelId")!, - apiKey: Env.Var("OpenAI:ApiKey")!, - orgId: Env.Var("OpenAI:OrgId") - ); - } - else if (Env.Var("OpenAI:ModelType") == "chat-completion") - { - kernelBuilder.Services.AddOpenAIChatCompletion( - modelId: Env.Var("OpenAI:ChatCompletionModelId")!, - apiKey: Env.Var("OpenAI:ApiKey")!, - orgId: Env.Var("OpenAI:OrgId") - ); - } + kernelBuilder.Services.AddOpenAIChatCompletion( + modelId: Env.Var("OpenAI:ChatCompletionModelId")!, + apiKey: Env.Var("OpenAI:ApiKey")!, + orgId: Env.Var("OpenAI:OrgId") + ); break; default: - throw new ArgumentException($"Invalid service type value: {Env.Var("OpenAI:ModelType")}"); + throw new ArgumentException($"Invalid service type value: {Env.Var("Global:LlmService")}"); } return kernelBuilder; diff --git a/dotnet/samples/Demos/FSharpScripts/huggingFaceChatCompletion.fsx 
b/dotnet/samples/Demos/FSharpScripts/huggingFaceChatCompletion.fsx new file mode 100644 index 000000000000..60c35ac38598 --- /dev/null +++ b/dotnet/samples/Demos/FSharpScripts/huggingFaceChatCompletion.fsx @@ -0,0 +1,86 @@ +#r "nuget: Microsoft.Extensions.DependencyInjection" +#r "nuget: Microsoft.Extensions.Http" +#r "nuget: Microsoft.Extensions.Logging.Console" +#r "nuget: Microsoft.Extensions.Logging" +#r "nuget: Microsoft.SemanticKernel.Connectors.HuggingFace, 1.12.0-preview" + + +open Microsoft.SemanticKernel +open Microsoft.SemanticKernel.ChatCompletion +open Microsoft.Extensions.Logging +open System +open Microsoft.Extensions.DependencyInjection +open Microsoft.Extensions.Http.Logging +open System.Net.Http +open System.Net.Http.Json +open Microsoft.Extensions.Http +open System.Threading.Tasks + +let builder = + // TODO: request your API key in your ๐Ÿค— hugging face private settings + let API_KEY = "TODO_REPLACE_ME" + let MODEL_ID = "microsoft/Phi-3-mini-4k-instruct" // pick the model you prefer! + let API_URL = $"https://api-inference.huggingface.co/" + + let b = Kernel.CreateBuilder().AddHuggingFaceChatCompletion( + MODEL_ID, + API_URL |> Uri, + API_KEY) + + b.Services + .AddLogging(fun b -> + + b.AddFilter("System.Net.Http.HttpClient", + LogLevel.Debug) |> ignore + b.AddFilter("Microsoft.AspNetCore.HttpLogging.HttpLoggingMiddleware", + LogLevel.Debug) |> ignore + + b.AddConsole() |> ignore + b.SetMinimumLevel(LogLevel.Information) |> ignore + + |> ignore + )|> ignore + + b + +let kernel = builder.Build() + +let chatCompletion = + kernel.GetRequiredService() + +let chatHistory = + new ChatHistory(""" + You are an expert in F#, dotnet, aspnet and .fsx and scripting with nuget! 
+ always reply in this example format for conversations + + question: `how do i declare a record in F#?` + --- + answer: + ```fsharp + type Car = { Brand: string } + ``` + + try to keep answers as short and relevant as possible, if you do NOT know, + ASK for more details to the user and wait for the next input + """) + +let mutable exit = false + +while not exit do + printfn "I am an F# assistant, ask me anything!" + + let question = System.Console.ReadLine() + chatHistory.Add(new ChatMessageContent(AuthorRole.Assistant, question)) + + let result = + chatCompletion.GetChatMessageContentAsync(chatHistory) + |> Async.AwaitTask + |> Async.RunSynchronously + + Console.WriteLine(result.Role) + Console.WriteLine(result.Content) + + printfn "another round? y/n" + printfn "\r\n" + let reply = Console.ReadKey() + exit <- reply.KeyChar.ToString().ToLower() <> "y" diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj index ead3b5036cb4..e39a7f5b795d 100644 --- a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj +++ b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj @@ -13,7 +13,7 @@ - + diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/Program.cs b/dotnet/samples/Demos/FunctionInvocationApproval/Program.cs index e0eb9a4684e9..f6389ddecf52 100644 --- a/dotnet/samples/Demos/FunctionInvocationApproval/Program.cs +++ b/dotnet/samples/Demos/FunctionInvocationApproval/Program.cs @@ -37,7 +37,7 @@ public static async Task Main() var executionSettings = new OpenAIPromptExecutionSettings { Temperature = 0, - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Initialize kernel arguments. 
diff --git a/dotnet/samples/Demos/HomeAutomation/HomeAutomation.csproj b/dotnet/samples/Demos/HomeAutomation/HomeAutomation.csproj index 06dfceda8b48..bf825d89e087 100644 --- a/dotnet/samples/Demos/HomeAutomation/HomeAutomation.csproj +++ b/dotnet/samples/Demos/HomeAutomation/HomeAutomation.csproj @@ -6,7 +6,7 @@ enable enable 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 - $(NoWarn);CA2007,CA2208,CS1591,IDE0009,IDE0055,IDE0073,VSTHRD111 + $(NoWarn);CA2007,CA2208,CS1591,IDE0009,IDE0055,IDE0073,VSTHRD111,SKEXP0001 diff --git a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs similarity index 91% rename from dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs rename to dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs index f4096b5e95d5..ef20853597cc 100644 --- a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs +++ b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs @@ -7,7 +7,7 @@ namespace HomeAutomation.Options; /// /// Azure OpenAI settings. 
/// -public sealed class AzureOpenAI +public sealed class AzureOpenAIOptions { [Required] public string ChatDeploymentName { get; set; } = string.Empty; diff --git a/dotnet/samples/Demos/HomeAutomation/Program.cs b/dotnet/samples/Demos/HomeAutomation/Program.cs index e55279405ceb..8f4882e3303f 100644 --- a/dotnet/samples/Demos/HomeAutomation/Program.cs +++ b/dotnet/samples/Demos/HomeAutomation/Program.cs @@ -32,24 +32,25 @@ internal static async Task Main(string[] args) builder.Services.AddHostedService(); // Get configuration - builder.Services.AddOptions() - .Bind(builder.Configuration.GetSection(nameof(AzureOpenAI))) + builder.Services.AddOptions() + .Bind(builder.Configuration.GetSection(nameof(AzureOpenAIOptions))) .ValidateDataAnnotations() .ValidateOnStart(); // Chat completion service that kernels will use builder.Services.AddSingleton(sp => { - AzureOpenAI options = sp.GetRequiredService>().Value; + OpenAIOptions options = sp.GetRequiredService>().Value; // A custom HttpClient can be provided to this constructor - return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey); + return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey); - /* Alternatively, you can use plain, non-Azure OpenAI after loading OpenAIOptions instead - of AzureOpenAI options with builder.Services.AddOptions: - OpenAI options = sp.GetRequiredService>().Value; + /* Alternatively, you can use plain, Azure OpenAI after loading AzureOpenAIOptions instead + of OpenAI options with builder.Services.AddOptions: - return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey);*/ + AzureOpenAIOptions options = sp.GetRequiredService>().Value; + + return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey); */ }); // Add plugins that can be used by kernels diff --git a/dotnet/samples/Demos/HomeAutomation/Worker.cs b/dotnet/samples/Demos/HomeAutomation/Worker.cs index 
88312ab15b1d..4d25107db745 100644 --- a/dotnet/samples/Demos/HomeAutomation/Worker.cs +++ b/dotnet/samples/Demos/HomeAutomation/Worker.cs @@ -26,7 +26,7 @@ protected override async Task ExecuteAsync(CancellationToken stoppingToken) // Enable auto function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine("Ask questions or give instructions to the copilot such as:\n" + diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/Facts/KernelMemory.txt b/dotnet/samples/Demos/OnnxSimpleRAG/Facts/KernelMemory.txt new file mode 100644 index 000000000000..6478fa964656 --- /dev/null +++ b/dotnet/samples/Demos/OnnxSimpleRAG/Facts/KernelMemory.txt @@ -0,0 +1 @@ +Kernel Memory (KM) is a multi-modal AI Service specialized in the efficient indexing of datasets through custom continuous data hybrid pipelines, with support for Retrieval Augmented Generation (RAG), synthetic memory, prompt engineering, and custom semantic memory processing. \ No newline at end of file diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/Facts/SemanticKernel.txt b/dotnet/samples/Demos/OnnxSimpleRAG/Facts/SemanticKernel.txt new file mode 100644 index 000000000000..1e01789793de --- /dev/null +++ b/dotnet/samples/Demos/OnnxSimpleRAG/Facts/SemanticKernel.txt @@ -0,0 +1 @@ +Semantic Kernel is a lightweight, open-source development kit that lets you easily build AI agents and integrate the latest AI models into your C#, Python, or Java codebase. It serves as an efficient middleware that enables rapid delivery of enterprise-grade solutions. 
\ No newline at end of file diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj b/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj new file mode 100644 index 000000000000..e20919d8cc66 --- /dev/null +++ b/dotnet/samples/Demos/OnnxSimpleRAG/OnnxSimpleRAG.csproj @@ -0,0 +1,27 @@ +๏ปฟ + + + Exe + net8.0 + $(NoWarn);CA2007;CS0612;VSTHRD111 + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + + + + + + + + + + + + + + Always + + + + diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs new file mode 100644 index 000000000000..1569b73ae0e4 --- /dev/null +++ b/dotnet/samples/Demos/OnnxSimpleRAG/Program.cs @@ -0,0 +1,91 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +#pragma warning disable SKEXP0070 +#pragma warning disable SKEXP0050 +#pragma warning disable SKEXP0001 + +using System; +using System.IO; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Memory; +using Microsoft.SemanticKernel.Plugins.Memory; + +// Ensure you follow the preparation steps provided in the README.md +var config = new ConfigurationBuilder().AddUserSecrets().Build(); + +// Path to the folder of your downloaded ONNX PHI-3 model +var chatModelPath = config["Onnx:ModelPath"]!; +var chatModelId = config["Onnx:ModelId"] ?? 
"phi-3"; + +// Path to the file of your downloaded ONNX BGE-MICRO-V2 model +var embeddingModelPath = config["Onnx:EmbeddingModelPath"]!; + +// Path to the vocab file your ONNX BGE-MICRO-V2 model +var embeddingVocabPath = config["Onnx:EmbeddingVocabPath"]!; + +// Load the services +var builder = Kernel.CreateBuilder() + .AddOnnxRuntimeGenAIChatCompletion(chatModelId, chatModelPath) + .AddBertOnnxTextEmbeddingGeneration(embeddingModelPath, embeddingVocabPath); + +// Build Kernel +var kernel = builder.Build(); + +// Get the instances of the services +var chatService = kernel.GetRequiredService(); +var embeddingService = kernel.GetRequiredService(); + +// Create a memory store and a semantic text memory +var memoryStore = new VolatileMemoryStore(); +var semanticTextMemory = new SemanticTextMemory(memoryStore, embeddingService); + +// Loading it for Save, Recall and other methods +kernel.ImportPluginFromObject(new TextMemoryPlugin(semanticTextMemory)); + +// Save some information to the memory +var collectionName = "ExampleCollection"; +foreach (var factTextFile in Directory.GetFiles("Facts", "*.txt")) +{ + var factContent = File.ReadAllText(factTextFile); + await semanticTextMemory.SaveInformationAsync( + collection: collectionName, + text: factContent, + id: Guid.NewGuid().ToString()); +} + +// Start the conversation +while (true) +{ + // Get user input + Console.ForegroundColor = ConsoleColor.White; + Console.Write("User > "); + var question = Console.ReadLine()!; + + if (question is null || string.IsNullOrWhiteSpace(question)) + { + // Exit the demo if the user input is null or empty + return; + } + + // Invoke the kernel with the user input + var response = kernel.InvokePromptStreamingAsync( + promptTemplate: @"Question: {{$input}} + Answer the question using the memory content: {{Recall}}", + arguments: new KernelArguments() + { + { "input", question }, + { "collection", collectionName } + }); + + Console.Write("\nAssistant > "); + + await foreach (var message in 
response) + { + Console.Write(message); + } + + Console.WriteLine(); +} diff --git a/dotnet/samples/Demos/OnnxSimpleRAG/README.md b/dotnet/samples/Demos/OnnxSimpleRAG/README.md new file mode 100644 index 000000000000..da6a3ad726ff --- /dev/null +++ b/dotnet/samples/Demos/OnnxSimpleRAG/README.md @@ -0,0 +1,97 @@ +๏ปฟ# Onnx Simple RAG (Retrieval Augmented Generation) Sample + +This sample demonstrates how you can do RAG using Semantic Kernel with the ONNX Connector that enables running Local Models straight from files. + +In this example we setup two ONNX AI Services: +- Chat Completion with [Microsoft's Phi-3-ONNX](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx) model +- Text Embeddings with [Taylor's BGE Micro V2](https://huggingface.co/TaylorAI/bge-micro-v2) for embeddings to enable RAG for user queries. + +> [!IMPORTANT] +> You can modify to use any other combination of models enabled for ONNX runtime. + +## Semantic Kernel used Features + +- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service from [Onnx Connector](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs) to generate responses from the Local Model. +- [Text Embeddings Generation Service]() - Using the Text Embeddings Generation Service from [Onnx Connector](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.Onnx/BertOnnxTextEmbeddingGenerationService.cs) to generate +- [Memory Store](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Memory/IMemoryStore.cs) Using Memory Store Service with [VolatileMemoryStore](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Plugins/Plugins.Memory/VolatileMemoryStore.cs) to store and retrieve embeddings in memory for RAG. 
+- [Semantic Text Memory](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Memory/SemanticTextMemory.cs) to manage the embeddings in memory for RAG. +- [Text Memory Plugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Plugins/Plugins.Memory/TextMemoryPlugin.cs) to enable memory retrieval functions (Recall) to be used with Prompts for RAG. + +## Prerequisites + +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0). + +## 1. Configuring the sample + +### Downloading the Models + +For this example we chose Hugging Face as our repository for download of the local models, go to a directory of your choice where the models should be downloaded and run the following commands: + +```powershell +git clone https://huggingface.co/TaylorAI/bge-micro-v2 +git clone https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx +``` + +> [!IMPORTANT] +> Both `BGE-Micro-V2` and `Phi-3` models are too large to be downloaded by the `git clone` command alone if you don't have [git-lfs extension](https://git-lfs.com/) installed, for this you may need to download the models manually and overwrite the files in the cloned directories. + +- Manual download [BGE-Micro-V2](https://huggingface.co/TaylorAI/bge-micro-v2/resolve/main/onnx/model.onnx?download=true) (69 MB) +- Manual download [Phi-3-Mini-4k CPU](https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-onnx/resolve/main/cpu_and_mobile/cpu-int4-rtn-block-32/phi3-mini-4k-instruct-cpu-int4-rtn-block-32.onnx.data?download=true) (โ‰ˆ2.7 GB) + +Update the `Program.cs` file lines below with the paths to the models you downloaded in the previous step. 
+ +```csharp +// Path to the folder of your downloaded ONNX PHI-3 model +var chatModelPath = @"C:\path\to\huggingface\Phi-3-mini-4k-instruct-onnx\cpu_and_mobile\cpu-int4-rtn-block-32"; + +// Path to the file of your downloaded ONNX BGE-MICRO-V2 model +var embeddingModelPath = @"C:\path\to\huggingface\bge-micro-v2\onnx\model.onnx"; + +// Path to the vocab file your ONNX BGE-MICRO-V2 model +var embeddingVocabPath = @"C:\path\to\huggingface\bge-micro-v2\vocab.txt"; +``` + +**Optional**: Change or add any fact text you want your AI to know about to the `facts` list in the `Program.cs` file. + +```csharp +foreach (var fact in new[] { + "My fact 1.", + "My fact 2." }) +{ +``` + +## Configuring the sample + +The sample can be configured by using the command line with .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches and pull requests. + +### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) + +```powershell +dotnet user-secrets set "Onnx:ModelId" ".. Onnx model id" (default: phi-3) +dotnet user-secrets set "Onnx:ModelPath" ".. your Onnx model folder path .." +dotnet user-secrets set "Onnx:EmbeddingModelPath" ".. your Onnx model file path .." +dotnet user-secrets set "Onnx:EmbeddingVocabPath" ".. your Onnx model vocab file path .." +``` + +## 3. Running the sample + +After configuring the sample, to build and run the console application just hit `F5`. + +To build and run the console application from the terminal use the following commands: + +```powershell +dotnet build +dotnet run +``` + +Ask to the Model any question you want, the AI will try to answer based on the facts you provided. + +### Example of a conversation + +> **User** > What is semantic kernel SDK used for? 
+ +> **Assistant** > The Semantic Kernel SDK is used for developing AI agents and integrating the latest AI models into your C#, Python, or Java codebase. It acts as a lightweight, open-source development kit that facilitates the creation of AI applications with enterprise-grade capabilities. By serving as an efficient middleware, it allows for the rapid development and deployment of AI solutions across various programming environments. + +> **User** > When should I use Kernel Memory? + +> **Assistant** > Kernel Memory is best suited for tasks that involve large-scale continuous data, require efficient indexing and processing, and benefit from the integration of retrieval-based and generative models, synthetic memory, prompt engineering, and custom semantic memory processing. diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs index 8878bc0b57e5..9d0ae902a152 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs @@ -1,5 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+#pragma warning disable IDE0005 // Using directive is unnecessary + +using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; @@ -8,6 +11,8 @@ using StepwisePlannerMigration.Plugins; using StepwisePlannerMigration.Services; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Controllers; /// @@ -43,7 +48,7 @@ public async Task GeneratePlanAsync(PlanRequest request) ChatHistory chatHistory = []; chatHistory.AddUserMessage(request.Goal); - OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings executionSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, executionSettings, this._kernel); @@ -57,7 +62,7 @@ public async Task GeneratePlanAsync(PlanRequest request) [HttpPost, Route("execute-new-plan")] public async Task ExecuteNewPlanAsync(PlanRequest request) { - OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings executionSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; FunctionResult result = await this._kernel.InvokePromptAsync(request.Goal, new(executionSettings)); @@ -72,7 +77,7 @@ public async Task ExecuteNewPlanAsync(PlanRequest request) public async Task ExecuteExistingPlanAsync() { ChatHistory chatHistory = this._planProvider.GetPlan("auto-function-calling-plan.json"); - OpenAIPromptExecutionSettings executionSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings executionSettings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; ChatMessageContent result = await 
this._chatCompletionService.GetChatMessageContentAsync(chatHistory, executionSettings, this._kernel); diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs index f060268833ca..096ce4795fb3 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs @@ -1,5 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + +using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; @@ -8,6 +11,8 @@ using StepwisePlannerMigration.Plugins; using StepwisePlannerMigration.Services; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Controllers; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs index a7eca68c33c8..3407d79479ed 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System.ComponentModel.DataAnnotations; +using Microsoft.Extensions.Configuration; namespace StepwisePlannerMigration.Extensions; diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs index 7a1ce92d0a71..80b976702ed3 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs @@ -1,8 +1,13 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + +using System; using System.ComponentModel; using Microsoft.SemanticKernel; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Plugins; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs index dfd72dd36c2c..52658a47e13e 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs @@ -1,8 +1,12 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + using System.ComponentModel; using Microsoft.SemanticKernel; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Plugins; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs index 99b62fba30b7..cd9186d405b2 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs @@ -1,5 +1,10 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System.IO; +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Planning; using StepwisePlannerMigration.Extensions; diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs index 4bdae07f6ed7..695a3a18e9c9 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs @@ -1,7 +1,11 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +#pragma warning disable IDE0005 // Using directive is unnecessary + using Microsoft.SemanticKernel.ChatCompletion; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Services; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs index 13218eeec135..ed5bd4f03fe1 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs +++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs @@ -1,8 +1,14 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System.IO; using System.Text.Json; + +#pragma warning disable IDE0005 // Using directive is unnecessary + using Microsoft.SemanticKernel.ChatCompletion; +#pragma warning restore IDE0005 // Using directive is unnecessary + namespace StepwisePlannerMigration.Services; /// diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj index 1475397e7eb2..abd289077625 100644 --- a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj +++ b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj @@ -3,7 +3,6 @@ net8.0 enable - enable $(NoWarn);VSTHRD111,CA2007,CS8618,CS1591,SKEXP0001, SKEXP0060 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 diff --git a/dotnet/samples/Demos/TelemetryWithAppInsights/Program.cs b/dotnet/samples/Demos/TelemetryWithAppInsights/Program.cs index e2407b3428c5..cefe43a1c2cd 100644 --- a/dotnet/samples/Demos/TelemetryWithAppInsights/Program.cs +++ b/dotnet/samples/Demos/TelemetryWithAppInsights/Program.cs @@ -323,7 +323,7 @@ public bool TrySelectAIService( AzureOpenAIServiceKey => new OpenAIPromptExecutionSettings() { Temperature = 0, - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }, GoogleAIGeminiServiceKey => new GeminiPromptExecutionSettings() { diff --git a/dotnet/samples/Demos/TelemetryWithAppInsights/README.md b/dotnet/samples/Demos/TelemetryWithAppInsights/README.md index 0194af9dc0ef..f177f98fc43d 100644 --- a/dotnet/samples/Demos/TelemetryWithAppInsights/README.md +++ b/dotnet/samples/Demos/TelemetryWithAppInsights/README.md @@ -1,6 +1,6 @@ ๏ปฟ# Semantic Kernel Telemetry with AppInsights -This example project shows how an application can be configured to send Semantic Kernel telemetry to Application Insights. 
+This sample project shows how a .Net application can be configured to send Semantic Kernel telemetry to Application Insights. > Note that it is also possible to use other Application Performance Management (APM) vendors. An example is [Prometheus](https://prometheus.io/docs/introduction/overview/). Please refer to this [link](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/metrics-collection#configure-the-example-app-to-use-opentelemetrys-prometheus-exporter) on how to do it. @@ -16,7 +16,7 @@ For more information, please refer to the following articles: ## What to expect -The Semantic Kernel SDK is designed to efficiently generate comprehensive logs, traces, and metrics throughout the flow of function execution and model invocation. This allows you to effectively monitor your AI application's performance and accurately track token consumption. +The Semantic Kernel .Net SDK is designed to efficiently generate comprehensive logs, traces, and metrics throughout the flow of function execution and model invocation. This allows you to effectively monitor your AI application's performance and accurately track token consumption. > `ActivitySource.StartActivity` internally determines if there are any listeners recording the Activity. If there are no registered listeners or there are listeners that are not interested, StartActivity() will return null and avoid creating the Activity object. Read more [here](https://learn.microsoft.com/en-us/dotnet/core/diagnostics/distributed-tracing-instrumentation-walkthroughs). @@ -74,7 +74,7 @@ dotnet user-secrets set "MistralAI:ApiKey" "..." dotnet user-secrets set "ApplicationInsights:ConnectionString" "..." ``` -## Running the example +## Running the sample Simply run `dotnet run` under this directory if the command line interface is preferred. Otherwise, this example can also be run in Visual Studio. 
diff --git a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj index aaf0e5545b76..5bbf69a5c72a 100644 --- a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj +++ b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj @@ -7,7 +7,7 @@ disable false - $(NoWarn);CA1024;CA1050;CA1707;CA2007;CS1591;VSTHRD111,SKEXP0050,SKEXP0060,SKEXP0070 + $(NoWarn);CA1024;CA1050;CA1707;CA2007;CS1591;VSTHRD111,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0001 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 @@ -18,8 +18,8 @@ + - diff --git a/dotnet/samples/Demos/TimePlugin/Program.cs b/dotnet/samples/Demos/TimePlugin/Program.cs index 405e443db0f2..5ee075d5641f 100644 --- a/dotnet/samples/Demos/TimePlugin/Program.cs +++ b/dotnet/samples/Demos/TimePlugin/Program.cs @@ -31,7 +31,7 @@ // Enable auto function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine("Ask questions to use the Time Plugin such as:\n" + diff --git a/dotnet/samples/Demos/TimePlugin/TimePlugin.csproj b/dotnet/samples/Demos/TimePlugin/TimePlugin.csproj index 37a777d6a97e..a217de67e2d2 100644 --- a/dotnet/samples/Demos/TimePlugin/TimePlugin.csproj +++ b/dotnet/samples/Demos/TimePlugin/TimePlugin.csproj @@ -6,6 +6,7 @@ enable enable 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + $(NoWarn);SKEXP0001 diff --git a/dotnet/samples/GettingStarted/GettingStarted.csproj b/dotnet/samples/GettingStarted/GettingStarted.csproj index bbfb30f31a72..81581e7b4d57 100644 --- a/dotnet/samples/GettingStarted/GettingStarted.csproj +++ b/dotnet/samples/GettingStarted/GettingStarted.csproj @@ -50,7 +50,7 @@ - + @@ -60,6 +60,6 @@ - + \ No newline at end of file diff --git a/dotnet/samples/GettingStarted/Step2_Add_Plugins.cs 
b/dotnet/samples/GettingStarted/Step2_Add_Plugins.cs index bdca86fc2ff3..bb2fc387aabe 100644 --- a/dotnet/samples/GettingStarted/Step2_Add_Plugins.cs +++ b/dotnet/samples/GettingStarted/Step2_Add_Plugins.cs @@ -35,7 +35,7 @@ public async Task RunAsync() Console.WriteLine(await kernel.InvokePromptAsync("The current time is {{TimeInformation.GetCurrentUtcTime}}. How many days until Christmas?")); // Example 3. Invoke the kernel with a prompt and allow the AI to automatically invoke functions - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings))); // Example 4. Invoke the kernel with a prompt and allow the AI to automatically invoke functions that use enumerations diff --git a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs index 15d90a3c7b53..dd39962d627a 100644 --- a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs +++ b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs @@ -41,7 +41,7 @@ private ServiceProvider BuildServiceProvider() collection.AddSingleton(new XunitLogger(this.Output)); var kernelBuilder = collection.AddKernel(); - kernelBuilder.Services.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey); + kernelBuilder.Services.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey); kernelBuilder.Plugins.AddFromType(); return collection.BuildServiceProvider(); diff --git a/dotnet/samples/GettingStarted/Step7_Observability.cs b/dotnet/samples/GettingStarted/Step7_Observability.cs index 0191ea5316f5..c65c2eb92209 100644 --- a/dotnet/samples/GettingStarted/Step7_Observability.cs +++ 
b/dotnet/samples/GettingStarted/Step7_Observability.cs @@ -33,7 +33,7 @@ public async Task ObservabilityWithFiltersAsync() kernel.PromptRenderFilters.Add(new MyPromptFilter(this.Output)); // Invoke the kernel with a prompt and allow the AI to automatically invoke functions - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings))); } @@ -88,7 +88,7 @@ void MyInvokedHandler(object? sender, FunctionInvokedEventArgs e) kernel.FunctionInvoked += MyInvokedHandler; // Invoke the kernel with a prompt and allow the AI to automatically invoke functions - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; Console.WriteLine(await kernel.InvokePromptAsync("How many days until Christmas? 
Explain your thinking.", new(settings))); } diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj index ea4decbf86bb..23b3aa7989a4 100644 --- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj +++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj @@ -9,7 +9,7 @@ true - $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110 + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001 Library 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 @@ -32,15 +32,19 @@ - + + + true + - + + @@ -48,4 +52,14 @@ + + + Always + + + + + + + diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md index 39952506548c..ed0e68802994 100644 --- a/dotnet/samples/GettingStartedWithAgents/README.md +++ b/dotnet/samples/GettingStartedWithAgents/README.md @@ -19,13 +19,17 @@ The getting started with agents examples include: Example|Description ---|--- -[Step1_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs)|How to create and use an agent. -[Step2_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs)|How to associate plug-ins with an agent. -[Step3_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs)|How to create a conversation between agents. 
-[Step4_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_. -[Step5_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs)|How to have an agent produce JSON. -[Step6_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs)|How to define dependency injection patterns for agents. -[Step7_OpenAIAssistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step7_OpenAIAssistant.cs)|How to create an Open AI Assistant agent. +[Step01_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs)|How to create and use an agent. +[Step02_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs)|How to associate plug-ins with an agent. +[Step03_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs)|How to create a conversation between agents. +[Step04_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_. +[Step05_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs)|How to have an agent produce JSON. +[Step06_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs)|How to define dependency injection patterns for agents. 
+[Step07_Logging](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs)|How to enable logging for agents. +[Step08_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs)|How to create an Open AI Assistant agent. +[Step09_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent. +[Step10_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter_.cs)|How to use the code-interpreter tool for an Open AI Assistant agent. +[Step11_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent. ## Legacy Agents diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml b/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml new file mode 100644 index 000000000000..fc5ecd88f34e --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Resources/GenerateStory.yaml @@ -0,0 +1,17 @@ +๏ปฟname: GenerateStory +template: | + Tell a story about {{$topic}} that is {{$length}} sentences long. +template_format: semantic-kernel +description: A function that generates a story about a topic. +input_variables: + - name: topic + description: The topic of the story. + is_required: true + - name: length + description: The number of sentences in the story. + is_required: true +output_variable: + description: The generated story. 
+execution_settings: + default: + temperature: 0.6 diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg b/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg new file mode 100644 index 000000000000..1e9f26de48fc Binary files /dev/null and b/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg differ diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf b/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf new file mode 100644 index 000000000000..bba45f80a90b Binary files /dev/null and b/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf differ diff --git a/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs new file mode 100644 index 000000000000..dfd6aeb22fb3 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs @@ -0,0 +1,100 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; + +namespace GettingStarted; + +/// +/// Demonstrate creation of and +/// eliciting its response to three explicit user messages. +/// +public class Step01_Agent(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string ParrotName = "Parrot"; + private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; + + [Fact] + public async Task UseSingleChatCompletionAgentAsync() + { + Kernel kernel = this.CreateKernelWithChatCompletion(); + + // Define the agent + ChatCompletionAgent agent = + new() + { + Name = ParrotName, + Instructions = ParrotInstructions, + Kernel = this.CreateKernelWithChatCompletion(), + }; + + /// Create the chat history to capture the agent interaction. 
+ ChatHistory chat = []; + + // Respond to user input + await InvokeAgentAsync("Fortune favors the bold."); + await InvokeAgentAsync("I came, I saw, I conquered."); + await InvokeAgentAsync("Practice makes perfect."); + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) + { + chat.Add(response); + + this.WriteAgentChatMessage(response); + } + } + } + + [Fact] + public async Task UseTemplateForChatCompletionAgentAsync() + { + // Define the agent + string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml"); + PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml); + + // Instructions, Name and Description properties defined via the config. + ChatCompletionAgent agent = + new(templateConfig, new KernelPromptTemplateFactory()) + { + Kernel = this.CreateKernelWithChatCompletion(), + Arguments = new KernelArguments() + { + { "topic", "Dog" }, + { "length", "3" }, + } + }; + + /// Create the chat history to capture the agent interaction. + ChatHistory chat = []; + + // Invoke the agent with the default arguments. + await InvokeAgentAsync(); + + // Invoke the agent with the override arguments. + await InvokeAgentAsync( + new() + { + { "topic", "Cat" }, + { "length", "3" }, + }); + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(KernelArguments? 
arguments = null) + { + await foreach (ChatMessageContent content in agent.InvokeAsync(chat, arguments)) + { + chat.Add(content); + + WriteAgentChatMessage(content); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs similarity index 73% rename from dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs rename to dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs index 7946adc7f687..047020a90b67 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs @@ -11,7 +11,7 @@ namespace GettingStarted; /// Demonstrate creation of with a , /// and then eliciting its response to explicit user messages. /// -public class Step2_Plugins(ITestOutputHelper output) : BaseTest(output) +public class Step02_Plugins(ITestOutputHelper output) : BaseAgentsTest(output) { private const string HostName = "Host"; private const string HostInstructions = "Answer questions about the menu."; @@ -26,7 +26,7 @@ public async Task UseChatCompletionWithPluginAgentAsync() Instructions = HostInstructions, Name = HostName, Kernel = this.CreateKernelWithChatCompletion(), - Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }), + Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), }; // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). @@ -45,37 +45,34 @@ public async Task UseChatCompletionWithPluginAgentAsync() // Local function to invoke agent and display the conversation messages. 
async Task InvokeAgentAsync(string input) { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, input); + chat.Add(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) + await foreach (ChatMessageContent response in agent.InvokeAsync(chat)) { - chat.Add(content); + chat.Add(response); - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } - public sealed class MenuPlugin + private sealed class MenuPlugin { [KernelFunction, Description("Provides a list of specials from the menu.")] [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; [KernelFunction, Description("Provides the price of the requested menu item.")] public string GetItemPrice( [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } + string menuItem) => + "$9.99"; } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs similarity index 86% rename from dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs rename to dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs index 5d0c185f95f5..1ada85d512f3 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs @@ -11,7 +11,7 @@ namespace GettingStarted; /// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum /// number of agent interactions. 
/// -public class Step3_Chat(ITestOutputHelper output) : BaseTest(output) +public class Step03_Chat(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -74,16 +74,16 @@ public async Task UseAgentGroupChatWithTwoAgentsAsync() }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs similarity index 74% rename from dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs rename to dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs index d71b6ae26767..f3916ad1e583 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs @@ -2,6 +2,7 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; +using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; namespace GettingStarted; @@ -10,7 +11,7 
@@ namespace GettingStarted; /// Demonstrate usage of and /// to manage execution. /// -public class Step4_KernelFunctionStrategies(ITestOutputHelper output) : BaseTest(output) +public class Step04_KernelFunctionStrategies(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -27,6 +28,7 @@ public class Step4_KernelFunctionStrategies(ITestOutputHelper output) : BaseTest You are a copywriter with ten years of experience and are known for brevity and a dry humor. The goal is to refine and decide on the single best copy as an expert in the field. Only provide a single proposal per response. + Never delimit the response with quotation marks. You're laser focused on the goal at hand. Don't waste time with chit chat. Consider suggestions when refining an idea. @@ -53,31 +55,37 @@ public async Task UseKernelFunctionStrategiesWithAgentGroupChatAsync() }; KernelFunction terminationFunction = - KernelFunctionFactory.CreateFromPrompt( + AgentGroupChat.CreatePromptFunctionForStrategy( """ Determine if the copy has been approved. If so, respond with a single word: yes History: {{$history}} - """); + """, + safeParameterNames: "history"); KernelFunction selectionFunction = - KernelFunctionFactory.CreateFromPrompt( + AgentGroupChat.CreatePromptFunctionForStrategy( $$$""" - Your job is to determine which participant takes the next turn in a conversation according to the action of the most recent participant. + Determine which participant takes the next turn in a conversation based on the the most recent participant. State only the name of the participant to take the next turn. + No participant should take more than one turn in a row. Choose only from these participants: - {{{ReviewerName}}} - {{{CopyWriterName}}} Always follow these rules when selecting the next participant: - - After {{{CopyWriterName}}} replies, it is {{{ReviewerName}}}'s turn. 
- - After {{{ReviewerName}}} provides feedback, it is {{{CopyWriterName}}}'s turn. + - After {{{CopyWriterName}}}, it is {{{ReviewerName}}}'s turn. + - After {{{ReviewerName}}}, it is {{{CopyWriterName}}}'s turn. History: {{$history}} - """); + """, + safeParameterNames: "history"); + + // Limit history used for selection and termination to the most recent message. + ChatHistoryTruncationReducer strategyReducer = new(1); // Create a chat for agent interaction. AgentGroupChat chat = @@ -99,6 +107,8 @@ State only the name of the participant to take the next turn. HistoryVariableName = "history", // Limit total number of turns MaximumIterations = 10, + // Save tokens by not including the entire history in the prompt + HistoryReducer = strategyReducer, }, // Here a KernelFunctionSelectionStrategy selects agents based on a prompt function. SelectionStrategy = @@ -108,24 +118,24 @@ State only the name of the participant to take the next turn. InitialAgent = agentWriter, // Returns the entire result value as a string. ResultParser = (result) => result.GetValue() ?? CopyWriterName, - // The prompt variable name for the agents argument. - AgentsVariableName = "agents", // The prompt variable name for the history argument. HistoryVariableName = "history", + // Save tokens by not including the entire history in the prompt + HistoryReducer = strategyReducer, }, } }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent message = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent responese in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(responese); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } } diff --git a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs similarity index 79% rename from dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs rename to dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs index 20ad4c2096d4..8806c7d3b62d 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs @@ -10,14 +10,14 @@ namespace GettingStarted; /// /// Demonstrate parsing JSON response. /// -public class Step5_JsonResult(ITestOutputHelper output) : BaseTest(output) +public class Step05_JsonResult(ITestOutputHelper output) : BaseAgentsTest(output) { private const int ScoreCompletionThreshold = 70; private const string TutorName = "Tutor"; private const string TutorInstructions = """ - Think step-by-step and rate the user input on creativity and expressivness from 1-100. + Think step-by-step and rate the user input on creativity and expressiveness from 1-100. Respond in JSON format with the following JSON schema: @@ -60,19 +60,20 @@ public async Task UseKernelFunctionStrategiesWithJsonResultAsync() // Local function to invoke agent and display the conversation messages. async Task InvokeAgentAsync(string input) { - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + ChatMessageContent message = new(AuthorRole.User, input); + chat.AddChatMessage(message); + this.WriteAgentChatMessage(message); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in chat.InvokeAsync(agent)) + await foreach (ChatMessageContent response in chat.InvokeAsync(agent)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + this.WriteAgentChatMessage(response); + + Console.WriteLine($"[IS COMPLETED: {chat.IsComplete}]"); } } } - private record struct InputScore(int score, string notes); + private record struct WritingScore(int score, string notes); private sealed class ThresholdTerminationStrategy : TerminationStrategy { @@ -80,7 +81,7 @@ protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyLi { string lastMessageContent = history[history.Count - 1].Content ?? string.Empty; - InputScore? result = JsonResultTranslator.Translate(lastMessageContent); + WritingScore? result = JsonResultTranslator.Translate(lastMessageContent); return Task.FromResult((result?.score ?? 0) >= ScoreCompletionThreshold); } diff --git a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs similarity index 63% rename from dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs rename to dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs index 21af5db70dce..5beb969bf090 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs @@ -3,23 +3,19 @@ using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.ChatCompletion; -using Resources; namespace GettingStarted; /// /// Demonstrate creation of an agent via dependency injection. 
/// -public class Step6_DependencyInjection(ITestOutputHelper output) : BaseTest(output) +public class Step06_DependencyInjection(ITestOutputHelper output) : BaseAgentsTest(output) { - private const int ScoreCompletionThreshold = 70; - private const string TutorName = "Tutor"; private const string TutorInstructions = """ - Think step-by-step and rate the user input on creativity and expressivness from 1-100. + Think step-by-step and rate the user input on creativity and expressiveness from 1-100. Respond in JSON format with the following JSON schema: @@ -62,7 +58,7 @@ public async Task UseDependencyInjectionToCreateAgentAsync() { Instructions = TutorInstructions, Name = TutorName, - Kernel = sp.GetRequiredService(), + Kernel = sp.GetRequiredService().Clone(), }); // Create a service provider for resolving registered services @@ -73,57 +69,34 @@ public async Task UseDependencyInjectionToCreateAgentAsync() AgentClient agentClient = serviceProvider.GetRequiredService(); // Execute the agent-client - await WriteAgentResponse("The sunset is very colorful."); + await WriteAgentResponse("The sunset is nice."); await WriteAgentResponse("The sunset is setting over the mountains."); await WriteAgentResponse("The sunset is setting over the mountains and filled the sky with a deep red flame, setting the clouds ablaze."); // Local function to invoke agent and display the conversation messages. async Task WriteAgentResponse(string input) { - Console.WriteLine($"# {AuthorRole.User}: {input}"); + ChatMessageContent message = new(AuthorRole.User, input); + this.WriteAgentChatMessage(message); - await foreach (ChatMessageContent content in agentClient.RunDemoAsync(input)) + await foreach (ChatMessageContent response in agentClient.RunDemoAsync(message)) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } } } private sealed class AgentClient([FromKeyedServices(TutorName)] ChatCompletionAgent agent) { - private readonly AgentGroupChat _chat = - new() - { - ExecutionSettings = - new() - { - // Here a TerminationStrategy subclass is used that will terminate when - // the response includes a score that is greater than or equal to 70. - TerminationStrategy = new ThresholdTerminationStrategy() - } - }; - - public IAsyncEnumerable RunDemoAsync(string input) - { - // Create a chat for agent interaction. + private readonly AgentGroupChat _chat = new(); - this._chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + public IAsyncEnumerable RunDemoAsync(ChatMessageContent input) + { + this._chat.AddChatMessage(input); return this._chat.InvokeAsync(agent); } } - private record struct InputScore(int score, string notes); - - private sealed class ThresholdTerminationStrategy : TerminationStrategy - { - protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken) - { - string lastMessageContent = history[history.Count - 1].Content ?? string.Empty; - - InputScore? result = JsonResultTranslator.Translate(lastMessageContent); - - return Task.FromResult((result?.score ?? 
0) >= ScoreCompletionThreshold); - } - } + private record struct WritingScore(int score, string notes); } diff --git a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs similarity index 86% rename from dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs rename to dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs index 1ab559e668fb..3a48d407dea9 100644 --- a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs +++ b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs @@ -8,13 +8,13 @@ namespace GettingStarted; /// -/// A repeat of with logging enabled via assignment +/// A repeat of with logging enabled via assignment /// of a to . /// /// /// Samples become super noisy with logging always enabled. /// -public class Step7_Logging(ITestOutputHelper output) : BaseTest(output) +public class Step07_Logging(ITestOutputHelper output) : BaseAgentsTest(output) { private const string ReviewerName = "ArtDirector"; private const string ReviewerInstructions = @@ -81,16 +81,16 @@ public async Task UseLoggerFactoryWithAgentGroupChatAsync() }; // Invoke chat and display messages. - string input = "concept: maps made out of egg cartons."; - chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); + ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons."); + chat.AddChatMessage(input); + this.WriteAgentChatMessage(input); - await foreach (ChatMessageContent content in chat.InvokeAsync()) + await foreach (ChatMessageContent response in chat.InvokeAsync()) { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? 
"*"}: '{content.Content}'"); + this.WriteAgentChatMessage(response); } - Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}"); + Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]"); } private sealed class ApprovalTerminationStrategy : TerminationStrategy diff --git a/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs new file mode 100644 index 000000000000..32c03a40a638 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs @@ -0,0 +1,142 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.ComponentModel; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; + +namespace GettingStarted; + +/// +/// This example demonstrates similarity between using +/// and (see: Step 2). +/// +public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output) +{ + private const string HostName = "Host"; + private const string HostInstructions = "Answer questions about the menu."; + + [Fact] + public async Task UseSingleAssistantAgentAsync() + { + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + Instructions = HostInstructions, + Name = HostName, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); + + // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + agent.Kernel.Plugins.Add(plugin); + + // Create a thread for the agent conversation. 
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + try + { + await InvokeAgentAsync("Hello"); + await InvokeAgentAsync("What is the special soup?"); + await InvokeAgentAsync("What is the special drink?"); + await InvokeAgentAsync("Thank you"); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } + + [Fact] + public async Task UseTemplateForAssistantAgentAsync() + { + // Define the agent + string generateStoryYaml = EmbeddedResource.Read("GenerateStory.yaml"); + PromptTemplateConfig templateConfig = KernelFunctionYaml.ToPromptTemplateConfig(generateStoryYaml); + + // Instructions, Name and Description properties defined via the config. + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateFromTemplateAsync( + clientProvider: this.GetClientProvider(), + capabilities: new OpenAIAssistantCapabilities(this.Model) + { + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel(), + defaultArguments: new KernelArguments() + { + { "topic", "Dog" }, + { "length", "3" }, + }, + templateConfig); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + try + { + // Invoke the agent with the default arguments. + await InvokeAgentAsync(); + + // Invoke the agent with the override arguments. 
+ await InvokeAgentAsync( + new() + { + { "topic", "Cat" }, + { "length", "3" }, + }); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + + // Local function to invoke agent and display the response. + async Task InvokeAgentAsync(KernelArguments? arguments = null) + { + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId, arguments)) + { + WriteAgentChatMessage(response); + } + } + } + + private sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() => + """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """; + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu item.")] + string menuItem) => + "$9.99"; + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs new file mode 100644 index 000000000000..09b02d4ceebf --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs @@ -0,0 +1,74 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Resources; + +namespace GettingStarted; + +/// +/// Demonstrate providing image input to . +/// +public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output) +{ + /// + /// Azure currently only supports message of type=text. 
+ /// + protected override bool ForceOpenAI => true; + + [Fact] + public async Task UseSingleAssistantAgentAsync() + { + // Define the agent + OpenAIClientProvider provider = this.GetClientProvider(); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + provider, + definition: new OpenAIAssistantDefinition(this.Model) + { + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); + + // Upload an image + await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!; + string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg"); + + // Create a thread for the agent conversation. + string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + try + { + // Refer to public image by url + await InvokeAgentAsync(CreateMessageWithImageUrl("Describe this image.", "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg")); + await InvokeAgentAsync(CreateMessageWithImageUrl("What are is the main color in this image?", "https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg")); + // Refer to uploaded image by file-id. + await InvokeAgentAsync(CreateMessageWithImageReference("Is there an animal in this image?", fileId)); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + await provider.Client.GetOpenAIFileClient().DeleteFileAsync(fileId); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(ChatMessageContent message) + { + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } + + private ChatMessageContent CreateMessageWithImageUrl(string input, string url) + => new(AuthorRole.User, [new TextContent(input), new ImageContent(new Uri(url))]); + + private ChatMessageContent CreateMessageWithImageReference(string input, string fileId) + => new(AuthorRole.User, [new TextContent(input), new FileReferenceContent(fileId)]); +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs new file mode 100644 index 000000000000..203009ffb561 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs @@ -0,0 +1,54 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace GettingStarted; + +/// +/// Demonstrate using code-interpreter on . +/// +public class Step10_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseCodeInterpreterToolWithAssistantAgentAsync() + { + // Define the agent + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new(this.Model) + { + EnableCodeInterpreter = true, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); + + // Create a thread for the agent conversation. 
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata }); + + // Respond to user input + try + { + await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?"); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + } + + // Local function to invoke agent and display the conversation messages. + async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs new file mode 100644 index 000000000000..77f4e5dbdff1 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs @@ -0,0 +1,84 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; +using OpenAI.VectorStores; +using Resources; + +namespace GettingStarted; + +/// +/// Demonstrate using code-interpreter on . 
+/// +public class Step11_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output) +{ + [Fact] + public async Task UseFileSearchToolWithAssistantAgentAsync() + { + // Define the agent + OpenAIClientProvider provider = this.GetClientProvider(); + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + clientProvider: this.GetClientProvider(), + definition: new OpenAIAssistantDefinition(this.Model) + { + EnableFileSearch = true, + Metadata = AssistantSampleMetadata, + }, + kernel: new Kernel()); + + // Upload file - Using a table of fictional employees. + OpenAIFileClient fileClient = provider.Client.GetOpenAIFileClient(); + await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!; + OpenAIFile fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants); + + // Create a vector-store + VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient(); + CreateVectorStoreOperation result = + await vectorStoreClient.CreateVectorStoreAsync(waitUntilCompleted: false, + new VectorStoreCreationOptions() + { + FileIds = { fileInfo.Id }, + Metadata = { { AssistantSampleMetadataKey, bool.TrueString } } + }); + + // Create a thread associated with a vector-store for the agent conversation. + string threadId = + await agent.CreateThreadAsync( + new OpenAIThreadCreationOptions + { + VectorStoreId = result.VectorStoreId, + Metadata = AssistantSampleMetadata, + }); + + // Respond to user input + try + { + await InvokeAgentAsync("Who is the youngest employee?"); + await InvokeAgentAsync("Who works in sales?"); + await InvokeAgentAsync("I have a customer request, who can help me?"); + } + finally + { + await agent.DeleteThreadAsync(threadId); + await agent.DeleteAsync(); + await vectorStoreClient.DeleteVectorStoreAsync(result.VectorStoreId); + await fileClient.DeleteFileAsync(fileInfo.Id); + } + + // Local function to invoke agent and display the conversation messages. 
+ async Task InvokeAgentAsync(string input) + { + ChatMessageContent message = new(AuthorRole.User, input); + await agent.AddChatMessageAsync(threadId, message); + this.WriteAgentChatMessage(message); + + await foreach (ChatMessageContent response in agent.InvokeAsync(threadId)) + { + this.WriteAgentChatMessage(response); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs deleted file mode 100644 index d7d4a0471b01..000000000000 --- a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs +++ /dev/null @@ -1,52 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace GettingStarted; - -/// -/// Demonstrate creation of and -/// eliciting its response to three explicit user messages. -/// -public class Step1_Agent(ITestOutputHelper output) : BaseTest(output) -{ - private const string ParrotName = "Parrot"; - private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound."; - - [Fact] - public async Task UseSingleChatCompletionAgentAsync() - { - // Define the agent - ChatCompletionAgent agent = - new() - { - Name = ParrotName, - Instructions = ParrotInstructions, - Kernel = this.CreateKernelWithChatCompletion(), - }; - - /// Create the chat history to capture the agent interaction. - ChatHistory chat = []; - - // Respond to user input - await InvokeAgentAsync("Fortune favors the bold."); - await InvokeAgentAsync("I came, I saw, I conquered."); - await InvokeAgentAsync("Practice makes perfect."); - - // Local function to invoke agent and display the conversation messages. 
- async Task InvokeAgentAsync(string input) - { - chat.Add(new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in agent.InvokeAsync(chat)) - { - chat.Add(content); - - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - } - } - } -} diff --git a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs b/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs deleted file mode 100644 index d9e9760e3fa6..000000000000 --- a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs +++ /dev/null @@ -1,100 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System.ComponentModel; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace GettingStarted; - -/// -/// This example demonstrates that outside of initialization (and cleanup), using -/// is no different from -/// even with with a . -/// -public class Step8_OpenAIAssistant(ITestOutputHelper output) : BaseTest(output) -{ - private const string HostName = "Host"; - private const string HostInstructions = "Answer questions about the menu."; - - [Fact] - public async Task UseSingleOpenAIAssistantAgentAsync() - { - // Define the agent - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - kernel: new(), - config: new(this.ApiKey, this.Endpoint), - new() - { - Instructions = HostInstructions, - Name = HostName, - ModelId = this.Model, - }); - - // Initialize plugin and add to the agent's Kernel (same as direct Kernel usage). - KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - agent.Kernel.Plugins.Add(plugin); - - // Create a thread for the agent interaction. 
- string threadId = await agent.CreateThreadAsync(); - - // Respond to user input - try - { - await InvokeAgentAsync("Hello"); - await InvokeAgentAsync("What is the special soup?"); - await InvokeAgentAsync("What is the special drink?"); - await InvokeAgentAsync("Thank you"); - } - finally - { - await agent.DeleteThreadAsync(threadId); - await agent.DeleteAsync(); - } - - // Local function to invoke agent and display the conversation messages. - async Task InvokeAgentAsync(string input) - { - await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, input)); - - Console.WriteLine($"# {AuthorRole.User}: '{input}'"); - - await foreach (ChatMessageContent content in agent.InvokeAsync(threadId)) - { - if (content.Role != AuthorRole.Tool) - { - Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'"); - } - } - } - } - - private sealed class MenuPlugin - { - public const string CorrelationIdArgument = "correlationId"; - - private readonly List _correlationIds = []; - - public IReadOnlyList CorrelationIds => this._correlationIds; - - [KernelFunction, Description("Provides a list of specials from the menu.")] - [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] - public string GetSpecials() - { - return @" -Special Soup: Clam Chowder -Special Salad: Cobb Salad -Special Drink: Chai Tea -"; - } - - [KernelFunction, Description("Provides the price of the requested menu item.")] - public string GetItemPrice( - [Description("The name of the menu item.")] - string menuItem) - { - return "$9.99"; - } - } -} diff --git a/dotnet/samples/GettingStartedWithProcesses/Events/CommonEvents.cs b/dotnet/samples/GettingStartedWithProcesses/Events/CommonEvents.cs new file mode 100644 index 000000000000..1c38a6533f39 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Events/CommonEvents.cs @@ -0,0 +1,11 @@ +๏ปฟ// Copyright (c) Microsoft. 
All rights reserved. +namespace Events; + +/// +/// Processes Events emitted by shared steps.
+///
+public static class CommonEvents +{ + public static readonly string UserInputReceived = nameof(UserInputReceived); + public static readonly string AssistantResponseGenerated = nameof(AssistantResponseGenerated); +} diff --git a/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj b/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj new file mode 100644 index 000000000000..b0113d35ff49 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/GettingStartedWithProcesses.csproj @@ -0,0 +1,59 @@ + + + + GettingStartedWithProcesses + net8.0 + enable + enable + false + true + + + + $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0101,SKEXP0110,OPENAI001 + Library + 5ee045b0-aea3-4f08-8d31-32d1a6f8fed0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Always + + + + \ No newline at end of file diff --git a/dotnet/samples/GettingStartedWithProcesses/README.md b/dotnet/samples/GettingStartedWithProcesses/README.md new file mode 100644 index 000000000000..9ebfd308a062 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/README.md @@ -0,0 +1,127 @@ +# Semantic Kernel Processes - Getting Started + +This project contains a step by step guide to get started with _Semantic Kernel Processes_. 
+ + +#### NuGet: +- [Microsoft.SemanticKernel.Process.Abstractions](https://www.nuget.org/packages/Microsoft.SemanticKernel.Process.Abstractions) +- [Microsoft.SemanticKernel.Process.Core](https://www.nuget.org/packages/Microsoft.SemanticKernel.Process.Core) +- [Microsoft.SemanticKernel.Process.LocalRuntime](https://www.nuget.org/packages/Microsoft.SemanticKernel.Process.LocalRuntime) + +#### Sources +- [Semantic Kernel Processes - Abstractions](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Experimental/Process.Abstractions) +- [Semantic Kernel Processes - Core](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Experimental/Process.Core) +- [Semantic Kernel Processes - LocalRuntime](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Experimental/Process.LocalRuntime) + +The examples can be run as integration tests but their code can also be copied to stand-alone programs. + +## Examples + +The getting started with agents examples include: + +Example|Description +---|--- +[Step01_Processes](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithProcesses/Step01/Step01_Processes.cs)|How to create a simple process with a loop and a conditional exit +[Step02_AccountOpening](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithProcesses/Step02/Step02_AccountOpening.cs)|Showcasing processes cycles, fan in, fan out for opening an account. + +### Step01_Processes + +```mermaid +flowchart LR + Intro(Intro)--> UserInput(User Input) + UserInput-->|User message == 'exit'| Exit(Exit) + UserInput-->|User message| AssistantResponse(Assistant Response) + AssistantResponse--> UserInput +``` + +### Step02_AccountOpening + +```mermaid +flowchart LR + User(User) -->|Provides user details| FillForm(Fill New
Customer
Form) + + FillForm -->|Need more info| AssistantMessage(Assistant
Message) + FillForm -->|Welcome Message| AssistantMessage + FillForm --> CompletedForm((Completed Form)) + AssistantMessage --> User + + CompletedForm --> CreditCheck(Customer
Credit Score
Check) + CompletedForm --> Fraud(Fraud Detection) + CompletedForm -->|New Customer Form + Conversation Transcript| CoreSystem + + CreditCheck -->|Failed - Notify user about insufficient credit score| Mailer(Mail
Service) + CreditCheck -->|Approved| Fraud + + Fraud --> |Failed - Notify user about failure to confirm user identity| Mailer + Fraud --> |Passed| CoreSystem(Core System
Record
Creation) + + CoreSystem --> Marketing(New Marketing
Record Creation) + CoreSystem --> CRM(CRM Record
Creation) + CoreSystem -->|Account Details| Welcome(Welcome
Packet) + + Marketing -->|Success| Welcome + CRM -->|Success| Welcome + + Welcome -->|Success: Notify User about Account Creation| Mailer + Mailer -->|End of Interaction| User +``` + + +## Running Examples with Filters +Examples may be explored and ran within _Visual Studio_ using _Test Explorer_. + +You can also run specific examples via the command-line by using test filters (`dotnet test --filter`). Type `dotnet test --help` at the command line for more details. + +Example: + +``` +dotnet test --filter Step01_Processes +``` + +## Configuring Secrets + +Each example requires secrets / credentials to access OpenAI or Azure OpenAI. + +We suggest using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches and pull requests. You can also use environment variables if you prefer. + +To set your secrets with .NET Secret Manager: + +1. Navigate the console to the project folder: + + ``` + cd dotnet/samples/GettingStartedWithProcesses + ``` + +2. Examine existing secret definitions: + + ``` + dotnet user-secrets list + ``` + +3. If needed, perform first time initialization: + + ``` + dotnet user-secrets init + ``` + +4. Define secrets for either Open AI: + + ``` + dotnet user-secrets set "OpenAI:ChatModelId" "..." + dotnet user-secrets set "OpenAI:ApiKey" "..." + ``` + +5. Or Azure Open AI: + + ``` + dotnet user-secrets set "AzureOpenAI:DeploymentName" "..." + dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..." + dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/" + dotnet user-secrets set "AzureOpenAI:ApiKey" "..." 
+ ``` + +> NOTE: Azure secrets will take precedence, if both Open AI and Azure Open AI secrets are defined, unless `ForceOpenAI` is set: + +``` +protected override bool ForceOpenAI => true; +``` diff --git a/dotnet/samples/GettingStartedWithProcesses/SharedSteps/DisplayAssistantMessageStep.cs b/dotnet/samples/GettingStartedWithProcesses/SharedSteps/DisplayAssistantMessageStep.cs new file mode 100644 index 000000000000..6a6a0f8ddf1e --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/SharedSteps/DisplayAssistantMessageStep.cs @@ -0,0 +1,29 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Events; +using Microsoft.SemanticKernel; + +namespace SharedSteps; + +/// +/// Step used in the Processes Samples: +/// - Step_02_AccountOpening.cs +/// +public class DisplayAssistantMessageStep : KernelProcessStep +{ + public static class Functions + { + public const string DisplayAssistantMessage = nameof(DisplayAssistantMessage); + } + + [KernelFunction(Functions.DisplayAssistantMessage)] + public async ValueTask DisplayAssistantMessageAsync(KernelProcessStepContext context, string assistantMessage) + { + Console.ForegroundColor = ConsoleColor.Blue; + Console.WriteLine($"ASSISTANT: {assistantMessage}\n"); + Console.ResetColor(); + + // Emit the assistantMessageGenerated + await context.EmitEventAsync(new() { Id = CommonEvents.AssistantResponseGenerated, Data = assistantMessage }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/SharedSteps/ScriptedUserInputStep.cs b/dotnet/samples/GettingStartedWithProcesses/SharedSteps/ScriptedUserInputStep.cs new file mode 100644 index 000000000000..95ed47e7ddff --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/SharedSteps/ScriptedUserInputStep.cs @@ -0,0 +1,80 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Events; +using Microsoft.SemanticKernel; + +namespace SharedSteps; + +/// +/// A step that elicits user input. 
+/// +/// Step used in the Processes Samples: +/// - Step_01_Processes.cs +/// - Step_02_AccountOpening.cs +/// +public class ScriptedUserInputStep : KernelProcessStep +{ + public static class Functions + { + public const string GetUserInput = nameof(GetUserInput); + } + + /// + /// The state object for the user input step. This object holds the user inputs and the current input index. + /// + protected UserInputState? _state; + + /// + /// Method to be overridden by the user to populate with custom user messages + /// + public virtual void PopulateUserInputs() + { + return; + } + + /// + /// Activates the user input step by initializing the state object. This method is called when the process is started + /// and before any of the KernelFunctions are invoked. + /// + /// The state object for the step. + /// A + public override ValueTask ActivateAsync(KernelProcessStepState state) + { + state.State ??= new(); + _state = state.State; + + PopulateUserInputs(); + + return ValueTask.CompletedTask; + } + + /// + /// Gets the user input. + /// + /// An instance of which can be + /// used to emit events from within a KernelFunction. 
+ /// A + [KernelFunction(Functions.GetUserInput)] + public async ValueTask GetUserInputAsync(KernelProcessStepContext context) + { + var userMessage = _state!.UserInputs[_state.CurrentInputIndex]; + _state.CurrentInputIndex++; + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($"USER: {userMessage}"); + Console.ResetColor(); + + // Emit the user input + await context.EmitEventAsync(new() { Id = CommonEvents.UserInputReceived, Data = userMessage }); + } +} + +/// +/// The state object for the +/// +public record UserInputState +{ + public List UserInputs { get; init; } = []; + + public int CurrentInputIndex { get; set; } = 0; +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step01/Step01_Processes.cs b/dotnet/samples/GettingStartedWithProcesses/Step01/Step01_Processes.cs new file mode 100644 index 000000000000..6b853d7ce4ee --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step01/Step01_Processes.cs @@ -0,0 +1,181 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Events; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using SharedSteps; + +namespace Step01; + +/// +/// Demonstrate creation of and +/// eliciting its response to three explicit user messages. +/// +public class Step01_Processes(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) +{ + /// + /// Demonstrates the creation of a simple process that has multiple steps, takes + /// user input, interacts with the chat completion service, and demonstrates cycles + /// in the process. 
+ /// + /// A + [Fact] + public async Task UseSimpleProcessAsync() + { + // Create a kernel with a chat completion service + Kernel kernel = Kernel.CreateBuilder() + .AddOpenAIChatCompletion( + modelId: TestConfiguration.OpenAI.ChatModelId, + apiKey: TestConfiguration.OpenAI.ApiKey) + .Build(); + + // Create a process that will interact with the chat completion service + ProcessBuilder process = new("ChatBot"); + var introStep = process.AddStepFromType(); + var userInputStep = process.AddStepFromType(); + var responseStep = process.AddStepFromType(); + + // Define the behavior when the process receives an external event + process + .OnInputEvent(ChatBotEvents.StartProcess) + .SendEventTo(new ProcessFunctionTargetBuilder(introStep)); + + // When the intro is complete, notify the userInput step + introStep + .OnFunctionResult(nameof(IntroStep.PrintIntroMessage)) + .SendEventTo(new ProcessFunctionTargetBuilder(userInputStep)); + + // When the userInput step emits an exit event, send it to the end step + userInputStep + .OnEvent(ChatBotEvents.Exit) + .StopProcess(); + + // When the userInput step emits a user input event, send it to the assistantResponse step + userInputStep + .OnEvent(CommonEvents.UserInputReceived) + .SendEventTo(new ProcessFunctionTargetBuilder(responseStep, parameterName: "userMessage")); + + // When the assistantResponse step emits a response, send it to the userInput step + responseStep + .OnEvent(ChatBotEvents.AssistantResponseGenerated) + .SendEventTo(new ProcessFunctionTargetBuilder(userInputStep)); + + // Build the process to get a handle that can be started + KernelProcess kernelProcess = process.Build(); + + // Start the process with an initial external event + var runningProcess = await kernelProcess.StartAsync(kernel, new KernelProcessEvent() { Id = ChatBotEvents.StartProcess, Data = null }); + } + + /// + /// The simplest implementation of a process step. 
IntroStep + /// + private sealed class IntroStep : KernelProcessStep + { + /// + /// Prints an introduction message to the console. + /// + [KernelFunction] + public void PrintIntroMessage() + { + System.Console.WriteLine("Welcome to Processes in Semantic Kernel.\n"); + } + } + + /// + /// A step that elicits user input. + /// + private sealed class ChatUserInputStep : ScriptedUserInputStep + { + public override void PopulateUserInputs() + { + if (_state != null) + { + _state.UserInputs.Add("Hello"); + _state.UserInputs.Add("How tall is the tallest mountain?"); + _state.UserInputs.Add("How low is the lowest valley?"); + _state.UserInputs.Add("How wide is the widest river?"); + _state.UserInputs.Add("exit"); + } + } + } + + /// + /// A step that takes the user input from a previous step and generates a response from the chat completion service. + /// + private sealed class ChatBotResponseStep : KernelProcessStep + { + public static class Functions + { + public const string GetChatResponse = nameof(GetChatResponse); + } + + /// + /// The internal state object for the chat bot response step. + /// + internal ChatBotState? _state; + + /// + /// ActivateAsync is the place to initialize the state object for the step. + /// + /// An instance of + /// A + public override ValueTask ActivateAsync(KernelProcessStepState state) + { + _state = state.State ?? new(); + _state.ChatMessages ??= new(); + return ValueTask.CompletedTask; + } + + /// + /// Generates a response from the chat completion service. + /// + /// The context for the current step and process. + /// The user message from a previous step. + /// A instance. 
+ /// + [KernelFunction(Functions.GetChatResponse)] + public async Task GetChatResponseAsync(KernelProcessStepContext context, string userMessage, Kernel _kernel) + { + _state!.ChatMessages.Add(new(AuthorRole.User, userMessage)); + IChatCompletionService chatService = _kernel.Services.GetRequiredService(); + ChatMessageContent response = await chatService.GetChatMessageContentAsync(_state.ChatMessages).ConfigureAwait(false); + if (response == null) + { + throw new InvalidOperationException("Failed to get a response from the chat completion service."); + } + + System.Console.ForegroundColor = ConsoleColor.Yellow; + System.Console.Write("Assistant: "); + System.Console.ResetColor(); + System.Console.WriteLine(response.Content); + + // Update state with the response + _state.ChatMessages.Add(response); + + // emit event: assistantResponse + await context.EmitEventAsync(new KernelProcessEvent { Id = ChatBotEvents.AssistantResponseGenerated, Data = response }); + } + } + + /// + /// The state object for the . + /// + private sealed class ChatBotState + { + internal ChatHistory ChatMessages { get; set; } = new(); + } + + /// + /// A class that defines the events that can be emitted by the chat bot process. This is + /// not required but used to ensure that the event names are consistent. + /// + private static class ChatBotEvents + { + public const string StartProcess = "startProcess"; + public const string IntroComplete = "introComplete"; + public const string AssistantResponseGenerated = "assistantResponseGenerated"; + public const string Exit = "exit"; + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountDetails.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountDetails.cs new file mode 100644 index 000000000000..6f732669d5dc --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountDetails.cs @@ -0,0 +1,19 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +namespace Step02.Models; + +/// +/// Represents the data structure for a form capturing details of a new customer, including personal information, contact details, account id and account type.
+/// Class used in samples +///
+public class AccountDetails : NewCustomerForm +{ + public Guid AccountId { get; set; } + public AccountType AccountType { get; set; } +} + +public enum AccountType +{ + PrimeABC, + Other, +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountOpeningEvents.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountOpeningEvents.cs new file mode 100644 index 000000000000..de1110854e27 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountOpeningEvents.cs @@ -0,0 +1,33 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +namespace Step02.Models; + +/// +/// Processes Events related to Account Opening scenarios.
+/// Class used in samples +///
+public static class AccountOpeningEvents +{ + public static readonly string StartProcess = nameof(StartProcess); + + public static readonly string NewCustomerFormWelcomeMessageComplete = nameof(NewCustomerFormWelcomeMessageComplete); + public static readonly string NewCustomerFormCompleted = nameof(NewCustomerFormCompleted); + public static readonly string NewCustomerFormNeedsMoreDetails = nameof(NewCustomerFormNeedsMoreDetails); + public static readonly string CustomerInteractionTranscriptReady = nameof(CustomerInteractionTranscriptReady); + + public static readonly string CreditScoreCheckApproved = nameof(CreditScoreCheckApproved); + public static readonly string CreditScoreCheckRejected = nameof(CreditScoreCheckRejected); + + public static readonly string FraudDetectionCheckPassed = nameof(FraudDetectionCheckPassed); + public static readonly string FraudDetectionCheckFailed = nameof(FraudDetectionCheckFailed); + + public static readonly string NewAccountDetailsReady = nameof(NewAccountDetailsReady); + + public static readonly string NewMarketingRecordInfoReady = nameof(NewMarketingRecordInfoReady); + public static readonly string NewMarketingEntryCreated = nameof(NewMarketingEntryCreated); + public static readonly string CRMRecordInfoReady = nameof(CRMRecordInfoReady); + public static readonly string CRMRecordInfoEntryCreated = nameof(CRMRecordInfoEntryCreated); + + public static readonly string WelcomePacketCreated = nameof(WelcomePacketCreated); + + public static readonly string MailServiceSent = nameof(MailServiceSent); +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountUserInteractionDetails.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountUserInteractionDetails.cs new file mode 100644 index 000000000000..123f0b2e417d --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/AccountUserInteractionDetails.cs @@ -0,0 +1,26 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; + +namespace Step02.Models; + +/// +/// Represents the details of interactions between a user and service, including a unique identifier for the account, +/// a transcript of conversation with the user, and the type of user interaction.
+/// Class used in samples +///
+public record AccountUserInteractionDetails +{ + public Guid AccountId { get; set; } + + public List InteractionTranscript { get; set; } = []; + + public UserInteractionType UserInteractionType { get; set; } +} + +public enum UserInteractionType +{ + Complaint, + AccountInfoRequest, + OpeningNewAccount +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Models/MarketingNewEntryDetails.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/MarketingNewEntryDetails.cs new file mode 100644 index 000000000000..057e97c81597 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/MarketingNewEntryDetails.cs @@ -0,0 +1,18 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Step02.Models; + +/// +/// Holds details for a new entry in a marketing database, including the account identifier, contact name, phone number, and email address.
+/// Class used in samples +///
+public record MarketingNewEntryDetails +{ + public Guid AccountId { get; set; } + + public string Name { get; set; } + + public string PhoneNumber { get; set; } + + public string Email { get; set; } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Models/NewCustomerForm.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/NewCustomerForm.cs new file mode 100644 index 000000000000..c000b8491d24 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Models/NewCustomerForm.cs @@ -0,0 +1,69 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using System.Text.Json.Serialization; + +namespace Step02.Models; + +/// +/// Represents the data structure for a form capturing details of a new customer, including personal information and contact details.
+/// Class used in samples +///
+public class NewCustomerForm +{ + [JsonPropertyName("userFirstName")] + public string UserFirstName { get; set; } = string.Empty; + + [JsonPropertyName("userLastName")] + public string UserLastName { get; set; } = string.Empty; + + [JsonPropertyName("userDateOfBirth")] + public string UserDateOfBirth { get; set; } = string.Empty; + + [JsonPropertyName("userState")] + public string UserState { get; set; } = string.Empty; + + [JsonPropertyName("userPhoneNumber")] + public string UserPhoneNumber { get; set; } = string.Empty; + + [JsonPropertyName("userId")] + public string UserId { get; set; } = string.Empty; + + [JsonPropertyName("userEmail")] + public string UserEmail { get; set; } = string.Empty; + + public NewCustomerForm CopyWithDefaultValues(string defaultStringValue = "Unanswered") + { + NewCustomerForm copy = new(); + PropertyInfo[] properties = typeof(NewCustomerForm).GetProperties(); + + foreach (PropertyInfo property in properties) + { + // Get the value of the property + string? value = property.GetValue(this) as string; + + // Check if the value is an empty string + if (string.IsNullOrEmpty(value)) + { + property.SetValue(copy, defaultStringValue); + } + else + { + property.SetValue(copy, value); + } + } + + return copy; + } + + public bool IsFormCompleted() + { + return !string.IsNullOrEmpty(UserFirstName) && + !string.IsNullOrEmpty(UserLastName) && + !string.IsNullOrEmpty(UserId) && + !string.IsNullOrEmpty(UserDateOfBirth) && + !string.IsNullOrEmpty(UserState) && + !string.IsNullOrEmpty(UserEmail) && + !string.IsNullOrEmpty(UserPhoneNumber); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Step02_AccountOpening.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Step02_AccountOpening.cs new file mode 100644 index 000000000000..f98632e1788e --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Step02_AccountOpening.cs @@ -0,0 +1,225 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Events; +using Microsoft.SemanticKernel; +using SharedSteps; +using Step02.Models; +using Step02.Steps; + +namespace Step02; + +/// +/// Demonstrate creation of and +/// eliciting its response to five explicit user messages.
+/// For each test there is a different set of user messages that will cause different steps to be triggered using the same pipeline.
+/// For visual reference of the process check the diagram . +///
+public class Step02_AccountOpening(ITestOutputHelper output) : BaseTest(output, redirectSystemConsoleOutput: true) +{ + private KernelProcess SetupAccountOpeningProcess() where TUserInputStep : ScriptedUserInputStep + { + ProcessBuilder process = new("AccountOpeningProcess"); + var newCustomerFormStep = process.AddStepFromType(); + var userInputStep = process.AddStepFromType(); + var displayAssistantMessageStep = process.AddStepFromType(); + var customerCreditCheckStep = process.AddStepFromType(); + var fraudDetectionCheckStep = process.AddStepFromType(); + var mailServiceStep = process.AddStepFromType(); + var coreSystemRecordCreationStep = process.AddStepFromType(); + var marketingRecordCreationStep = process.AddStepFromType(); + var crmRecordStep = process.AddStepFromType(); + var welcomePacketStep = process.AddStepFromType(); + + process.OnInputEvent(AccountOpeningEvents.StartProcess) + .SendEventTo(new ProcessFunctionTargetBuilder(newCustomerFormStep, CompleteNewCustomerFormStep.Functions.NewAccountWelcome)); + + // When the welcome message is generated, send message to displayAssistantMessageStep + newCustomerFormStep + .OnEvent(AccountOpeningEvents.NewCustomerFormWelcomeMessageComplete) + .SendEventTo(new ProcessFunctionTargetBuilder(displayAssistantMessageStep, DisplayAssistantMessageStep.Functions.DisplayAssistantMessage)); + + // When the userInput step emits a user input event, send it to the newCustomerForm step + // Function names are necessary when the step has multiple public functions like CompleteNewCustomerFormStep: NewAccountWelcome and NewAccountProcessUserInfo + userInputStep + .OnEvent(CommonEvents.UserInputReceived) + .SendEventTo(new ProcessFunctionTargetBuilder(newCustomerFormStep, CompleteNewCustomerFormStep.Functions.NewAccountProcessUserInfo, "userMessage")); + + // When the newCustomerForm step emits needs more details, send message to displayAssistantMessage step + newCustomerFormStep + 
.OnEvent(AccountOpeningEvents.NewCustomerFormNeedsMoreDetails) + .SendEventTo(new ProcessFunctionTargetBuilder(displayAssistantMessageStep, DisplayAssistantMessageStep.Functions.DisplayAssistantMessage)); + + // After any assistant message is displayed, user input is expected to the next step is the userInputStep + displayAssistantMessageStep + .OnEvent(CommonEvents.AssistantResponseGenerated) + .SendEventTo(new ProcessFunctionTargetBuilder(userInputStep, ScriptedUserInputStep.Functions.GetUserInput)); + + // When the newCustomerForm is completed, the information gets passed to the core system record creation step + newCustomerFormStep + .OnEvent(AccountOpeningEvents.NewCustomerFormCompleted) + .SendEventTo(new ProcessFunctionTargetBuilder(customerCreditCheckStep, functionName: CreditScoreCheckStep.Functions.DetermineCreditScore, parameterName: "customerDetails")); + + // When the newCustomerForm is completed, the information gets passed to the fraud detection step for validation + newCustomerFormStep + .OnEvent(AccountOpeningEvents.NewCustomerFormCompleted) + .SendEventTo(new ProcessFunctionTargetBuilder(fraudDetectionCheckStep, functionName: FraudDetectionStep.Functions.FraudDetectionCheck, parameterName: "customerDetails")); + + // When the newCustomerForm is completed, the information gets passed to the core system record creation step + newCustomerFormStep + .OnEvent(AccountOpeningEvents.NewCustomerFormCompleted) + .SendEventTo(new ProcessFunctionTargetBuilder(coreSystemRecordCreationStep, functionName: NewAccountStep.Functions.CreateNewAccount, parameterName: "customerDetails")); + + // When the newCustomerForm is completed, the user interaction transcript with the user is passed to the core system record creation step + newCustomerFormStep + .OnEvent(AccountOpeningEvents.CustomerInteractionTranscriptReady) + .SendEventTo(new ProcessFunctionTargetBuilder(coreSystemRecordCreationStep, functionName: NewAccountStep.Functions.CreateNewAccount, parameterName: 
"interactionTranscript")); + + // When the creditScoreCheck step results in Rejection, the information gets to the mailService step to notify the user about the state of the application and the reasons + customerCreditCheckStep + .OnEvent(AccountOpeningEvents.CreditScoreCheckRejected) + .SendEventTo(new ProcessFunctionTargetBuilder(mailServiceStep, functionName: MailServiceStep.Functions.SendMailToUserWithDetails, parameterName: "message")); + + // When the creditScoreCheck step results in Approval, the information gets to the fraudDetection step to kickstart this step + customerCreditCheckStep + .OnEvent(AccountOpeningEvents.CreditScoreCheckApproved) + .SendEventTo(new ProcessFunctionTargetBuilder(fraudDetectionCheckStep, functionName: FraudDetectionStep.Functions.FraudDetectionCheck, parameterName: "previousCheckSucceeded")); + + // When the fraudDetectionCheck step fails, the information gets to the mailService step to notify the user about the state of the application and the reasons + fraudDetectionCheckStep + .OnEvent(AccountOpeningEvents.FraudDetectionCheckFailed) + .SendEventTo(new ProcessFunctionTargetBuilder(mailServiceStep, functionName: MailServiceStep.Functions.SendMailToUserWithDetails, parameterName: "message")); + + // When the fraudDetectionCheck step passes, the information gets to core system record creation step to kickstart this step + fraudDetectionCheckStep + .OnEvent(AccountOpeningEvents.FraudDetectionCheckPassed) + .SendEventTo(new ProcessFunctionTargetBuilder(coreSystemRecordCreationStep, functionName: NewAccountStep.Functions.CreateNewAccount, parameterName: "previousCheckSucceeded")); + + // When the coreSystemRecordCreation step successfully creates a new accountId, it will trigger the creation of a new marketing entry through the marketingRecordCreation step + coreSystemRecordCreationStep + .OnEvent(AccountOpeningEvents.NewMarketingRecordInfoReady) + .SendEventTo(new ProcessFunctionTargetBuilder(marketingRecordCreationStep, 
functionName: NewMarketingEntryStep.Functions.CreateNewMarketingEntry, parameterName: "userDetails")); + + // When the coreSystemRecordCreation step successfully creates a new accountId, it will trigger the creation of a new CRM entry through the crmRecord step + coreSystemRecordCreationStep + .OnEvent(AccountOpeningEvents.CRMRecordInfoReady) + .SendEventTo(new ProcessFunctionTargetBuilder(crmRecordStep, functionName: CRMRecordCreationStep.Functions.CreateCRMEntry, parameterName: "userInteractionDetails")); + + // ParameterName is necessary when the step has multiple input arguments like welcomePacketStep.CreateWelcomePacketAsync + // When the coreSystemRecordCreation step successfully creates a new accountId, it will pass the account information details to the welcomePacket step + coreSystemRecordCreationStep + .OnEvent(AccountOpeningEvents.NewAccountDetailsReady) + .SendEventTo(new ProcessFunctionTargetBuilder(welcomePacketStep, parameterName: "accountDetails")); + + // When the marketingRecordCreation step successfully creates a new marketing entry, it will notify the welcomePacket step it is ready + marketingRecordCreationStep + .OnEvent(AccountOpeningEvents.NewMarketingEntryCreated) + .SendEventTo(new ProcessFunctionTargetBuilder(welcomePacketStep, parameterName: "marketingEntryCreated")); + + // When the crmRecord step successfully creates a new CRM entry, it will notify the welcomePacket step it is ready + crmRecordStep + .OnEvent(AccountOpeningEvents.CRMRecordInfoEntryCreated) + .SendEventTo(new ProcessFunctionTargetBuilder(welcomePacketStep, parameterName: "crmRecordCreated")); + + // After crmRecord and marketing gets created, a welcome packet is created to then send information to the user with the mailService step + welcomePacketStep + .OnEvent(AccountOpeningEvents.WelcomePacketCreated) + .SendEventTo(new ProcessFunctionTargetBuilder(mailServiceStep, functionName: MailServiceStep.Functions.SendMailToUserWithDetails, parameterName: "message")); + + // 
All possible paths end up with the user being notified about the account creation decision throw the mailServiceStep completion + mailServiceStep + .OnEvent(AccountOpeningEvents.MailServiceSent) + .StopProcess(); + + KernelProcess kernelProcess = process.Build(); + + return kernelProcess; + } + + /// + /// This test uses a specific userId and DOB that makes the creditScore and Fraud detection to pass + /// + [Fact] + public async Task UseAccountOpeningProcessSuccessfulInteractionAsync() + { + Kernel kernel = CreateKernelWithChatCompletion(); + KernelProcess kernelProcess = SetupAccountOpeningProcess(); + var runningProcess = await kernelProcess.StartAsync(kernel, new KernelProcessEvent() { Id = AccountOpeningEvents.StartProcess, Data = null }); + } + + private sealed class UserInputSuccessfulInteraction : ScriptedUserInputStep + { + public override void PopulateUserInputs() + { + if (_state != null) + { + _state.UserInputs.Add("I would like to open an account"); + _state.UserInputs.Add("My name is John Contoso, dob 02/03/1990"); + _state.UserInputs.Add("I live in Washington and my phone number es 222-222-1234"); + _state.UserInputs.Add("My userId is 987-654-3210"); + _state.UserInputs.Add("My email is john.contoso@contoso.com, what else do you need?"); + } + } + } + + /// + /// This test uses a specific DOB that makes the creditScore to fail + /// + [Fact] + public async Task UseAccountOpeningProcessFailureDueToCreditScoreFailureAsync() + { + Kernel kernel = CreateKernelWithChatCompletion(); + KernelProcess kernelProcess = SetupAccountOpeningProcess(); + var runningProcess = await kernelProcess.StartAsync(kernel, new KernelProcessEvent() { Id = AccountOpeningEvents.StartProcess, Data = null }); + } + + private sealed class UserInputCreditScoreFailureInteraction : ScriptedUserInputStep + { + public override void PopulateUserInputs() + { + if (_state != null) + { + _state.UserInputs.Add("I would like to open an account"); + _state.UserInputs.Add("My name is John 
Contoso, dob 01/01/1990"); + _state.UserInputs.Add("I live in Washington and my phone number es 222-222-1234"); + _state.UserInputs.Add("My userId is 987-654-3210"); + _state.UserInputs.Add("My email is john.contoso@contoso.com, what else do you need?"); + } + } + } + + /// + /// This test uses a specific userId that makes the fraudDetection to fail + /// + [Fact] + public async Task UseAccountOpeningProcessFailureDueToFraudFailureAsync() + { + Kernel kernel = CreateKernelWithChatCompletion(); + KernelProcess kernelProcess = SetupAccountOpeningProcess(); + var runningProcess = await kernelProcess.StartAsync(kernel, new KernelProcessEvent() { Id = AccountOpeningEvents.StartProcess, Data = null }); + } + + private sealed class UserInputFraudFailureInteraction : ScriptedUserInputStep + { + public override void PopulateUserInputs() + { + if (_state != null) + { + _state.UserInputs.Add("I would like to open an account"); + _state.UserInputs.Add("My name is John Contoso, dob 02/03/1990"); + _state.UserInputs.Add("I live in Washington and my phone number es 222-222-1234"); + _state.UserInputs.Add("My userId is 123-456-7890"); + _state.UserInputs.Add("My email is john.contoso@contoso.com, what else do you need?"); + } + } + } + + protected new Kernel CreateKernelWithChatCompletion() + { + var builder = Kernel.CreateBuilder(); + builder.AddOpenAIChatCompletion( + TestConfiguration.OpenAI.ChatModelId, + TestConfiguration.OpenAI.ApiKey); + + return builder.Build(); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CRMRecordCreationStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CRMRecordCreationStep.cs new file mode 100644 index 000000000000..10eb2aee468e --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CRMRecordCreationStep.cs @@ -0,0 +1,24 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates the creation of a new CRM entry +/// +public class CRMRecordCreationStep : KernelProcessStep +{ + public static class Functions + { + public const string CreateCRMEntry = nameof(CreateCRMEntry); + } + + [KernelFunction(Functions.CreateCRMEntry)] + public async Task CreateCRMEntryAsync(KernelProcessStepContext context, AccountUserInteractionDetails userInteractionDetails, Kernel _kernel) + { + // Placeholder for a call to API to create new CRM entry + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.CRMRecordInfoEntryCreated, Data = true }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CompleteNewCustomerFormStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CompleteNewCustomerFormStep.cs new file mode 100644 index 000000000000..88fb0b9827e9 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CompleteNewCustomerFormStep.cs @@ -0,0 +1,213 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Step that is helps the user fill up a new account form.
+/// Also provides a welcome message for the user. +///
+public class CompleteNewCustomerFormStep : KernelProcessStep +{ + public static class Functions + { + public const string NewAccountProcessUserInfo = nameof(NewAccountProcessUserInfo); + public const string NewAccountWelcome = nameof(NewAccountWelcome); + } + + internal NewCustomerFormState? _state; + + internal string _formCompletionSystemPrompt = """ + The goal is to fill up all the fields needed for a form. + The user may provide information to fill up multiple fields of the form in one message. + The user needs to fill up a form, all the fields of the form are necessary + + + {{current_form_state}} + + + GUIDANCE: + - If there are missing details, give the user a useful message that will help fill up the remaining fields. + - Your goal is to help guide the user to provide the missing details on the current form. + - Encourage the user to provide the remainingdetails with examples if necessary. + - Fields with value 'Unanswered' need to be answered by the user. + - For date fields, confirm with the user first if the date format is not clear. Example 02/03 03/02 could be March 2nd or February 3rd. + """; + + internal string _welcomeMessage = """ + Hello there, I can help you out fill out the information needed to open a new account with us. + Please provide some personal information like first name and last name to get started. + """; + + private readonly JsonSerializerOptions _jsonOptions = new() + { + DefaultIgnoreCondition = JsonIgnoreCondition.Never + }; + + public override ValueTask ActivateAsync(KernelProcessStepState state) + { + _state = state.State ?? 
new(); + _state.newCustomerForm ??= new(); + return ValueTask.CompletedTask; + } + + [KernelFunction(Functions.NewAccountWelcome)] + public async Task NewAccountWelcomeMessageAsync(KernelProcessStepContext context, Kernel _kernel) + { + _state?.conversation.Add(new ChatMessageContent { Role = AuthorRole.Assistant, Content = _welcomeMessage }); + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.NewCustomerFormWelcomeMessageComplete, Data = _welcomeMessage }); + } + + private Kernel CreateNewCustomerFormKernel(Kernel _baseKernel) + { + // Creating another kernel that only makes use private functions to fill up the new customer form + Kernel kernel = new(_baseKernel.Services); + kernel.ImportPluginFromFunctions("FillForm", [ + KernelFunctionFactory.CreateFromMethod(OnUserProvidedFirstName, functionName: nameof(OnUserProvidedFirstName)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedLastName, functionName: nameof(OnUserProvidedLastName)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedDOBDetails, functionName: nameof(OnUserProvidedDOBDetails)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedStateOfResidence, functionName: nameof(OnUserProvidedStateOfResidence)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedPhoneNumber, functionName: nameof(OnUserProvidedPhoneNumber)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedUserId, functionName: nameof(OnUserProvidedUserId)), + KernelFunctionFactory.CreateFromMethod(OnUserProvidedEmailAddress, functionName: nameof(OnUserProvidedEmailAddress)), + ]); + + return kernel; + } + + [KernelFunction(Functions.NewAccountProcessUserInfo)] + public async Task CompleteNewCustomerFormAsync(KernelProcessStepContext context, string userMessage, Kernel _kernel) + { + // Keeping track of all user interactions + _state?.conversation.Add(new ChatMessageContent { Role = AuthorRole.User, Content = userMessage }); + + Kernel kernel = CreateNewCustomerFormKernel(_kernel); + + 
OpenAIPromptExecutionSettings settings = new() + { + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions, + Temperature = 0.7, + MaxTokens = 2048 + }; + + ChatHistory chatHistory = new(); + chatHistory.AddSystemMessage(_formCompletionSystemPrompt + .Replace("{{current_form_state}}", JsonSerializer.Serialize(_state!.newCustomerForm.CopyWithDefaultValues(), _jsonOptions))); + chatHistory.AddUserMessage(userMessage); + IChatCompletionService chatService = kernel.Services.GetRequiredService(); + ChatMessageContent response = await chatService.GetChatMessageContentAsync(chatHistory, settings, kernel).ConfigureAwait(false); + var assistantResponse = ""; + + if (response != null) + { + assistantResponse = response.Items[0].ToString(); + // Keeping track of all assistant interactions + _state?.conversation.Add(new ChatMessageContent { Role = AuthorRole.Assistant, Content = assistantResponse }); + } + + if (_state?.newCustomerForm != null && _state.newCustomerForm.IsFormCompleted()) + { + // All user information is gathered to proceed to the next step + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.NewCustomerFormCompleted, Data = _state?.newCustomerForm }); + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.CustomerInteractionTranscriptReady, Data = _state?.conversation }); + return; + } + + // emit event: NewCustomerFormNeedsMoreDetails + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.NewCustomerFormNeedsMoreDetails, Data = assistantResponse }); + } + + [Description("User provided details of first name")] + private Task OnUserProvidedFirstName(string firstName) + { + if (!string.IsNullOrEmpty(firstName) && _state != null) + { + _state.newCustomerForm.UserFirstName = firstName; + } + + return Task.CompletedTask; + } + + [Description("User provided details of last name")] + private Task OnUserProvidedLastName(string lastName) + { + if (!string.IsNullOrEmpty(lastName) && _state != null) + { + 
_state.newCustomerForm.UserLastName = lastName; + } + + return Task.CompletedTask; + } + + [Description("User provided details of USA State the user lives in, must be in 2-letter Uppercase State Abbreviation format")] + private Task OnUserProvidedStateOfResidence(string stateAbbreviation) + { + if (!string.IsNullOrEmpty(stateAbbreviation) && _state != null) + { + _state.newCustomerForm.UserState = stateAbbreviation; + } + + return Task.CompletedTask; + } + + [Description("User provided details of date of birth, must be in the format MM/DD/YYYY")] + private Task OnUserProvidedDOBDetails(string date) + { + if (!string.IsNullOrEmpty(date) && _state != null) + { + _state.newCustomerForm.UserDateOfBirth = date; + } + + return Task.CompletedTask; + } + + [Description("User provided details of phone number, must be in the format (\\d{3})-\\d{3}-\\d{4}")] + private Task OnUserProvidedPhoneNumber(string phoneNumber) + { + if (!string.IsNullOrEmpty(phoneNumber) && _state != null) + { + _state.newCustomerForm.UserPhoneNumber = phoneNumber; + } + + return Task.CompletedTask; + } + + [Description("User provided details of userId, must be in the format \\d{3}-\\d{3}-\\d{4}")] + private Task OnUserProvidedUserId(string userId) + { + if (!string.IsNullOrEmpty(userId) && _state != null) + { + _state.newCustomerForm.UserId = userId; + } + + return Task.CompletedTask; + } + + [Description("User provided email address, must be in the an email valid format")] + private Task OnUserProvidedEmailAddress(string emailAddress) + { + if (!string.IsNullOrEmpty(emailAddress) && _state != null) + { + _state.newCustomerForm.UserEmail = emailAddress; + } + + return Task.CompletedTask; + } +} + +/// +/// The state object for the +/// +public class NewCustomerFormState +{ + internal NewCustomerForm newCustomerForm { get; set; } = new(); + internal List conversation { get; set; } = []; +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CreditScoreCheckStep.cs 
b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CreditScoreCheckStep.cs new file mode 100644 index 000000000000..655902640ac7 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/CreditScoreCheckStep.cs @@ -0,0 +1,34 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates User Credit Score check, based on the date of birth the score will be enough or insufficient +/// +public class CreditScoreCheckStep : KernelProcessStep +{ + public static class Functions + { + public const string DetermineCreditScore = nameof(DetermineCreditScore); + } + + private const int MinCreditScore = 600; + + [KernelFunction(Functions.DetermineCreditScore)] + public async Task DetermineCreditScoreAsync(KernelProcessStepContext context, NewCustomerForm customerDetails, Kernel _kernel) + { + // Placeholder for a call to API to validate credit score with customerDetails + var creditScore = customerDetails.UserDateOfBirth == "02/03/1990" ? 700 : 500; + + if (creditScore >= MinCreditScore) + { + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.CreditScoreCheckApproved, Data = true }); + return; + } + + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.CreditScoreCheckRejected, Data = $"We regret to inform you that your credit score of {creditScore} is insufficient to apply for an account of the type PRIME ABC" }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/FraudDetectionStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/FraudDetectionStep.cs new file mode 100644 index 000000000000..e6fa082f60f7 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/FraudDetectionStep.cs @@ -0,0 +1,34 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates a Fraud detection check, based on the userId the fraud detection will pass or fail. +/// +public class FraudDetectionStep : KernelProcessStep +{ + public static class Functions + { + public const string FraudDetectionCheck = nameof(FraudDetectionCheck); + } + + [KernelFunction(Functions.FraudDetectionCheck)] + public async Task FraudDetectionCheckAsync(KernelProcessStepContext context, bool previousCheckSucceeded, NewCustomerForm customerDetails, Kernel _kernel) + { + // Placeholder for a call to API to validate user details for fraud detection + if (customerDetails.UserId == "123-456-7890") + { + await context.EmitEventAsync(new() + { + Id = AccountOpeningEvents.FraudDetectionCheckFailed, + Data = "We regret to inform you that we found some inconsistent details regarding the information you provided regarding the new account of the type PRIME ABC you applied." + }); + return; + } + + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.FraudDetectionCheckPassed, Data = true }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/MailServiceStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/MailServiceStep.cs new file mode 100644 index 000000000000..b11f782cb201 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/MailServiceStep.cs @@ -0,0 +1,27 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates Mail Service with a message for the user. 
+/// +public class MailServiceStep : KernelProcessStep +{ + public static class Functions + { + public const string SendMailToUserWithDetails = nameof(SendMailToUserWithDetails); + } + + [KernelFunction(Functions.SendMailToUserWithDetails)] + public async Task SendMailServiceAsync(KernelProcessStepContext context, string message) + { + Console.WriteLine("======== MAIL SERVICE ======== "); + Console.WriteLine(message); + Console.WriteLine("============================== "); + + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.MailServiceSent, Data = message }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewAccountStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewAccountStep.cs new file mode 100644 index 000000000000..19314a0d0d43 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewAccountStep.cs @@ -0,0 +1,65 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates the creation of a new account that triggers other services after a new account id creation +/// +public class NewAccountStep : KernelProcessStep +{ + public static class Functions + { + public const string CreateNewAccount = nameof(CreateNewAccount); + } + + [KernelFunction(Functions.CreateNewAccount)] + public async Task CreateNewAccountAsync(KernelProcessStepContext context, bool previousCheckSucceeded, NewCustomerForm customerDetails, List interactionTranscript, Kernel _kernel) + { + // Placeholder for a call to API to create new account for user + var accountId = new Guid(); + AccountDetails accountDetails = new() + { + UserDateOfBirth = customerDetails.UserDateOfBirth, + UserFirstName = customerDetails.UserFirstName, + UserLastName = customerDetails.UserLastName, + UserId = customerDetails.UserId, + UserPhoneNumber = customerDetails.UserPhoneNumber, + UserState = customerDetails.UserState, + 
UserEmail = customerDetails.UserEmail, + AccountId = accountId, + AccountType = AccountType.PrimeABC, + }; + + await context.EmitEventAsync(new() + { + Id = AccountOpeningEvents.NewMarketingRecordInfoReady, + Data = new MarketingNewEntryDetails + { + AccountId = accountId, + Name = $"{customerDetails.UserFirstName} {customerDetails.UserLastName}", + PhoneNumber = customerDetails.UserPhoneNumber, + Email = customerDetails.UserEmail, + } + }); + + await context.EmitEventAsync(new() + { + Id = AccountOpeningEvents.CRMRecordInfoReady, + Data = new AccountUserInteractionDetails + { + AccountId = accountId, + UserInteractionType = UserInteractionType.OpeningNewAccount, + InteractionTranscript = interactionTranscript + } + }); + + await context.EmitEventAsync(new() + { + Id = AccountOpeningEvents.NewAccountDetailsReady, + Data = accountDetails, + }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewMarketingEntryStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewMarketingEntryStep.cs new file mode 100644 index 000000000000..55da96d76a45 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/NewMarketingEntryStep.cs @@ -0,0 +1,24 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates the creation a new marketing user entry. 
+/// +public class NewMarketingEntryStep : KernelProcessStep +{ + public static class Functions + { + public const string CreateNewMarketingEntry = nameof(CreateNewMarketingEntry); + } + + [KernelFunction(Functions.CreateNewMarketingEntry)] + public async Task CreateNewMarketingEntryAsync(KernelProcessStepContext context, MarketingNewEntryDetails userDetails, Kernel _kernel) + { + // Placeholder for a call to API to create new entry of user for marketing purposes + await context.EmitEventAsync(new() { Id = AccountOpeningEvents.NewMarketingEntryCreated, Data = true }); + } +} diff --git a/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/WelcomePacketStep.cs b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/WelcomePacketStep.cs new file mode 100644 index 000000000000..a316f29cde31 --- /dev/null +++ b/dotnet/samples/GettingStartedWithProcesses/Step02/Steps/WelcomePacketStep.cs @@ -0,0 +1,45 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Step02.Models; + +namespace Step02.Steps; + +/// +/// Mock step that emulates the creation of a Welcome Packet for a new user after account creation +/// +public class WelcomePacketStep : KernelProcessStep +{ + public static class Functions + { + public const string CreateWelcomePacket = nameof(CreateWelcomePacket); + } + + [KernelFunction(Functions.CreateWelcomePacket)] + public async Task CreateWelcomePacketAsync(KernelProcessStepContext context, bool marketingEntryCreated, bool crmRecordCreated, AccountDetails accountDetails, Kernel _kernel) + { + var mailMessage = $""" + Dear {accountDetails.UserFirstName} {accountDetails.UserLastName} + We are thrilled to inform you that you have successfully created a new PRIME ABC Account with us! + + Account Details: + Account Number: {accountDetails.AccountId} + Account Type: {accountDetails.AccountType} + + Please keep this confidential for security purposes. 
+ + Here is the contact information we have in file: + + Email: {accountDetails.UserEmail} + Phone: {accountDetails.UserPhoneNumber} + + Thank you for opening an account with us! + """; + + await context.EmitEventAsync(new() + { + Id = AccountOpeningEvents.WelcomePacketCreated, + Data = mailMessage, + }); + } +} diff --git a/dotnet/samples/LearnResources/LearnResources.csproj b/dotnet/samples/LearnResources/LearnResources.csproj index d210f8effa91..d639fc8a0cee 100644 --- a/dotnet/samples/LearnResources/LearnResources.csproj +++ b/dotnet/samples/LearnResources/LearnResources.csproj @@ -51,6 +51,7 @@ + @@ -68,6 +69,6 @@ - + \ No newline at end of file diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs index a56e6591f8ad..d957358cac77 100644 --- a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs +++ b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs @@ -45,25 +45,11 @@ public async Task RunAsync() .Build(); // - // You could instead create a kernel with a legacy Azure OpenAI text completion service - // - kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration(textModelId, endpoint, apiKey) - .Build(); - // - // You can also create a kernel with a (non-Azure) OpenAI chat completion service // kernel = Kernel.CreateBuilder() .AddOpenAIChatCompletion(openAImodelId, openAIapiKey) .Build(); // - - // Or a kernel with a legacy OpenAI text completion service - // - kernel = Kernel.CreateBuilder() - .AddOpenAITextGeneration(openAItextModelId, openAIapiKey) - .Build(); - // } } diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/CreatingFunctions.cs b/dotnet/samples/LearnResources/MicrosoftLearn/CreatingFunctions.cs index 7676f8701804..36e5fa4f7b53 100644 --- a/dotnet/samples/LearnResources/MicrosoftLearn/CreatingFunctions.cs +++ b/dotnet/samples/LearnResources/MicrosoftLearn/CreatingFunctions.cs @@ -62,7 +62,7 @@ public async Task RunAsync() // Enable auto 
function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get the response from the AI diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/Planner.cs b/dotnet/samples/LearnResources/MicrosoftLearn/Planner.cs index 316ae9164e7e..3c6b3f6bcf17 100644 --- a/dotnet/samples/LearnResources/MicrosoftLearn/Planner.cs +++ b/dotnet/samples/LearnResources/MicrosoftLearn/Planner.cs @@ -56,7 +56,7 @@ public async Task RunAsync() // Enable auto function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get the response from the AI diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/Plugin.cs b/dotnet/samples/LearnResources/MicrosoftLearn/Plugin.cs index a48e6403a8b7..e1a4d3ca6bec 100644 --- a/dotnet/samples/LearnResources/MicrosoftLearn/Plugin.cs +++ b/dotnet/samples/LearnResources/MicrosoftLearn/Plugin.cs @@ -59,7 +59,7 @@ public async Task RunAsync() // Enable auto function calling OpenAIPromptExecutionSettings openAIPromptExecutionSettings = new() { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; // Get the response from the AI diff --git a/dotnet/samples/LearnResources/Plugins/GitHub/GitHubModels.cs b/dotnet/samples/LearnResources/Plugins/GitHub/GitHubModels.cs new file mode 100644 index 000000000000..579933e32ed7 --- /dev/null +++ b/dotnet/samples/LearnResources/Plugins/GitHub/GitHubModels.cs @@ -0,0 +1,90 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text.Json.Serialization; + +namespace Plugins; + +/// +/// Models for GitHub REST API GET responses: +/// https://docs.github.com/en/rest +/// +internal static class GitHubModels +{ + public sealed class Repo + { + [JsonPropertyName("id")] + public long Id { get; set; } + + [JsonPropertyName("full_name")] + public string Name { get; set; } + + [JsonPropertyName("description")] + public string Description { get; set; } + + [JsonPropertyName("html_url")] + public string Url { get; set; } + } + + public sealed class User + { + [JsonPropertyName("id")] + public long Id { get; set; } + + [JsonPropertyName("login")] + public string Login { get; set; } + + [JsonPropertyName("name")] + public string Name { get; set; } + + [JsonPropertyName("company")] + public string Company { get; set; } + + [JsonPropertyName("html_url")] + public string Url { get; set; } + } + + public class Issue + { + [JsonPropertyName("id")] + public long Id { get; set; } + + [JsonPropertyName("number")] + public int Number { get; set; } + + [JsonPropertyName("html_url")] + public string Url { get; set; } + + [JsonPropertyName("title")] + public string Title { get; set; } + + [JsonPropertyName("state")] + public string State { get; set; } + + [JsonPropertyName("labels")] + public Label[] Labels { get; set; } + + [JsonPropertyName("created_at")] + public string WhenCreated { get; set; } + + [JsonPropertyName("closed_at")] + public string WhenClosed { get; set; } + } + + public sealed class IssueDetail : Issue + { + [JsonPropertyName("body")] + public string Body { get; set; } + } + + public sealed class Label + { + [JsonPropertyName("id")] + public long Id { get; set; } + + [JsonPropertyName("name")] + public string Name { get; set; } + + [JsonPropertyName("description")] + public string Description { get; set; } + } +} diff --git a/dotnet/samples/LearnResources/Plugins/GitHub/GitHubPlugin.cs b/dotnet/samples/LearnResources/Plugins/GitHub/GitHubPlugin.cs new file mode 100644 index 
000000000000..56333898e467 --- /dev/null +++ b/dotnet/samples/LearnResources/Plugins/GitHub/GitHubPlugin.cs @@ -0,0 +1,107 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ComponentModel; +using System.Text.Json; +using Microsoft.SemanticKernel; + +namespace Plugins; + +internal sealed class GitHubSettings +{ + public string BaseUrl { get; set; } = "https://api.github.com"; + + public string Token { get; set; } = string.Empty; +} + +internal sealed class GitHubPlugin(GitHubSettings settings) +{ + [KernelFunction] + public async Task GetUserProfileAsync() + { + using HttpClient client = this.CreateClient(); + JsonDocument response = await MakeRequestAsync(client, "/user"); + return response.Deserialize() ?? throw new InvalidDataException($"Request failed: {nameof(GetUserProfileAsync)}"); + } + + [KernelFunction] + public async Task GetRepositoryAsync(string organization, string repo) + { + using HttpClient client = this.CreateClient(); + JsonDocument response = await MakeRequestAsync(client, $"/repos/{organization}/{repo}"); + + return response.Deserialize() ?? throw new InvalidDataException($"Request failed: {nameof(GetRepositoryAsync)}"); + } + + [KernelFunction] + public async Task GetIssuesAsync( + string organization, + string repo, + [Description("default count is 30")] + int? maxResults = null, + [Description("open, closed, or all")] + string state = "", + string label = "", + string assignee = "") + { + using HttpClient client = this.CreateClient(); + + string path = $"/repos/{organization}/{repo}/issues?"; + path = BuildQuery(path, "state", state); + path = BuildQuery(path, "assignee", assignee); + path = BuildQuery(path, "labels", label); + path = BuildQuery(path, "per_page", maxResults?.ToString() ?? string.Empty); + + JsonDocument response = await MakeRequestAsync(client, path); + + return response.Deserialize() ?? 
throw new InvalidDataException($"Request failed: {nameof(GetIssuesAsync)}"); + } + + [KernelFunction] + public async Task GetIssueDetailAsync(string organization, string repo, int issueId) + { + using HttpClient client = this.CreateClient(); + + string path = $"/repos/{organization}/{repo}/issues/{issueId}"; + + JsonDocument response = await MakeRequestAsync(client, path); + + return response.Deserialize() ?? throw new InvalidDataException($"Request failed: {nameof(GetIssueDetailAsync)}"); + } + + private HttpClient CreateClient() + { + HttpClient client = new() + { + BaseAddress = new Uri(settings.BaseUrl) + }; + + client.DefaultRequestHeaders.Clear(); + client.DefaultRequestHeaders.Add("User-Agent", "request"); + client.DefaultRequestHeaders.Add("Accept", "application/vnd.github+json"); + client.DefaultRequestHeaders.Add("Authorization", $"Bearer {settings.Token}"); + client.DefaultRequestHeaders.Add("X-GitHub-Api-Version", "2022-11-28"); + + return client; + } + + private static string BuildQuery(string path, string key, string value) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return $"{path}{key}={value}&"; + } + + return path; + } + + private static async Task MakeRequestAsync(HttpClient client, string path) + { + Console.WriteLine($"REQUEST: {path}"); + Console.WriteLine(); + + HttpResponseMessage response = await client.GetAsync(new Uri(path)); + response.EnsureSuccessStatusCode(); + string content = await response.Content.ReadAsStringAsync(); + return JsonDocument.Parse(content); + } +} diff --git a/dotnet/samples/LearnResources/Resources/Grimms-The-King-of-the-Golden-Mountain.txt b/dotnet/samples/LearnResources/Resources/Grimms-The-King-of-the-Golden-Mountain.txt new file mode 100644 index 000000000000..a5279d6e8c51 --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/Grimms-The-King-of-the-Golden-Mountain.txt @@ -0,0 +1,36 @@ +The King of the Golden Mountain +By the Grimm Brothers + +There was once a merchant who had only one child, a son, 
that was very young, and barely able to run alone. He had two richly laden ships then making a voyage upon the seas, in which he had embarked all his wealth, in the hope of making great gains, when the news came that both were lost. Thus from being a rich man he became all at once so very poor that nothing was left to him but one small plot of land; and there he often went in an evening to take his walk, and ease his mind of a little of his trouble. + +One day, as he was roaming along in a brown study, thinking with no great comfort on what he had been and what he now was, and was like to be, all on a sudden there stood before him a little, rough-looking, black dwarf. โ€™Prithee, friend, why so sorrowful?โ€™ said he to the merchant; โ€™what is it you take so deeply to heart?โ€™ โ€™If you would do me any good I would willingly tell you,โ€™ said the merchant. โ€™Who knows but I may?โ€™ said the little man: โ€™tell me what ails you, and perhaps you will find I may be of some use.โ€™ Then the merchant told him how all his wealth was gone to the bottom of the sea, and how he had nothing left but that little plot of land. โ€™Oh, trouble not yourself about that,โ€™ said the dwarf; โ€™only undertake to bring me here, twelve years hence, whatever meets you first on your going home, and I will give you as much as you please.โ€™ The merchant thought this was no great thing to ask; that it would most likely be his dog or his cat, or something of that sort, but forgot his little boy Heinel; so he agreed to the bargain, and signed and sealed the bond to do what was asked of him. + +But as he drew near home, his little boy was so glad to see him that he crept behind him, and laid fast hold of his legs, and looked up in his face and laughed. 
Then the father started, trembling with fear and horror, and saw what it was that he had bound himself to do; but as no gold was come, he made himself easy by thinking that it was only a joke that the dwarf was playing him, and that, at any rate, when the money came, he should see the bearer, and would not take it in. + +About a month afterwards he went upstairs into a lumber-room to look for some old iron, that he might sell it and raise a little money; and there, instead of his iron, he saw a large pile of gold lying on the floor. At the sight of this he was overjoyed, and forgetting all about his son, went into trade again, and became a richer merchant than before. + +Meantime little Heinel grew up, and as the end of the twelve years drew near the merchant began to call to mind his bond, and became very sad and thoughtful; so that care and sorrow were written upon his face. The boy one day asked what was the matter, but his father would not tell for some time; at last, however, he said that he had, without knowing it, sold him for gold to a little, ugly-looking, black dwarf, and that the twelve years were coming round when he must keep his word. Then Heinel said, โ€™Father, give yourself very little trouble about that; I shall be too much for the little man.โ€™ + +When the time came, the father and son went out together to the place agreed upon: and the son drew a circle on the ground, and set himself and his father in the middle of it. The little black dwarf soon came, and walked round and round about the circle, but could not find any way to get into it, and he either could not, or dared not, jump over it. At last the boy said to him. โ€™Have you anything to say to us, my friend, or what do you want?โ€™ Now Heinel had found a friend in a good fairy, that was fond of him, and had told him what to do; for this fairy knew what good luck was in store for him. โ€™Have you brought me what you said you would?โ€™ said the dwarf to the merchant. 
The old man held his tongue, but Heinel said again, โ€™What do you want here?โ€™ The dwarf said, โ€™I come to talk with your father, not with you.โ€™ โ€™You have cheated and taken in my father,โ€™ said the son; โ€™pray give him up his bond at once.โ€™ โ€™Fair and softly,โ€™ said the little old man; โ€™right is right; I have paid my money, and your father has had it, and spent it; so be so good as to let me have what I paid it for.โ€™ โ€™You must have my consent to that first,โ€™ said Heinel, โ€™so please to step in here, and let us talk it over.โ€™ The old man grinned, and showed his teeth, as if he should have been very glad to get into the circle if he could. Then at last, after a long talk, they came to terms. Heinel agreed that his father must give him up, and that so far the dwarf should have his way: but, on the other hand, the fairy had told Heinel what fortune was in store for him, if he followed his own course; and he did not choose to be given up to his hump-backed friend, who seemed so anxious for his company. + +So, to make a sort of drawn battle of the matter, it was settled that Heinel should be put into an open boat, that lay on the sea-shore hard by; that the father should push him off with his own hand, and that he should thus be set adrift, and left to the bad or good luck of wind and weather. Then he took leave of his father, and set himself in the boat, but before it got far off a wave struck it, and it fell with one side low in the water, so the merchant thought that poor Heinel was lost, and went home very sorrowful, while the dwarf went his way, thinking that at any rate he had had his revenge. + +The boat, however, did not sink, for the good fairy took care of her friend, and soon raised the boat up again, and it went safely on. The young man sat safe within, till at length it ran ashore upon an unknown land. As he jumped upon the shore he saw before him a beautiful castle but empty and dreary within, for it was enchanted. 
’Here,’ said he to himself, ’must I find the prize the good fairy told me of.’ So he once more searched the whole palace through, till at last he found a white snake, lying coiled up on a cushion in one of the chambers. + +Now the white snake was an enchanted princess; and she was very glad to see him, and said, ’Are you at last come to set me free? Twelve long years have I waited here for the fairy to bring you hither as she promised, for you alone can save me. This night twelve men will come: their faces will be black, and they will be dressed in chain armour. They will ask what you do here, but give no answer; and let them do what they will–beat, whip, pinch, prick, or torment you–bear all; only speak not a word, and at twelve o’clock they must go away. The second night twelve others will come: and the third night twenty-four, who will even cut off your head; but at the twelfth hour of that night their power is gone, and I shall be free, and will come and bring you the Water of Life, and will wash you with it, and bring you back to life and health.’ And all came to pass as she had said; Heinel bore all, and spoke not a word; and the third night the princess came, and fell on his neck and kissed him. Joy and gladness burst forth throughout the castle, the wedding was celebrated, and he was crowned king of the Golden Mountain. + +They lived together very happily, and the queen had a son. And thus eight years had passed over their heads, when the king thought of his father; and he began to long to see him once again. But the queen was against his going, and said, ’I know well that misfortunes will come upon us if you go.’ However, he gave her no rest till she agreed.
At his going away she gave him a wishing-ring, and said, โ€™Take this ring, and put it on your finger; whatever you wish it will bring you; only promise never to make use of it to bring me hence to your fatherโ€™s house.โ€™ Then he said he would do what she asked, and put the ring on his finger, and wished himself near the town where his father lived. + +Heinel found himself at the gates in a moment; but the guards would not let him go in, because he was so strangely clad. So he went up to a neighbouring hill, where a shepherd dwelt, and borrowed his old frock, and thus passed unknown into the town. When he came to his fatherโ€™s house, he said he was his son; but the merchant would not believe him, and said he had had but one son, his poor Heinel, who he knew was long since dead: and as he was only dressed like a poor shepherd, he would not even give him anything to eat. The king, however, still vowed that he was his son, and said, โ€™Is there no mark by which you would know me if I am really your son?โ€™ โ€™Yes,โ€™ said his mother, โ€™our Heinel had a mark like a raspberry on his right arm.โ€™ Then he showed them the mark, and they knew that what he had said was true. + +He next told them how he was king of the Golden Mountain, and was married to a princess, and had a son seven years old. But the merchant said, โ€™that can never be true; he must be a fine king truly who travels about in a shepherdโ€™s frock!โ€™ At this the son was vexed; and forgetting his word, turned his ring, and wished for his queen and son. In an instant they stood before him; but the queen wept, and said he had broken his word, and bad luck would follow. He did all he could to soothe her, and she at last seemed to be appeased; but she was not so in truth, and was only thinking how she should punish him. + +One day he took her to walk with him out of the town, and showed her the spot where the boat was set adrift upon the wide waters. 
Then he sat himself down, and said, โ€™I am very much tired; sit by me, I will rest my head in your lap, and sleep a while.โ€™ As soon as he had fallen asleep, however, she drew the ring from his finger, and crept softly away, and wished herself and her son at home in their kingdom. And when he awoke he found himself alone, and saw that the ring was gone from his finger. โ€™I can never go back to my fatherโ€™s house,โ€™ said he; โ€™they would say I am a sorcerer: I will journey forth into the world, till I come again to my kingdom.โ€™ + +So saying he set out and travelled till he came to a hill, where three giants were sharing their fatherโ€™s goods; and as they saw him pass they cried out and said, โ€™Little men have sharp wits; he shall part the goods between us.โ€™ Now there was a sword that cut off an enemyโ€™s head whenever the wearer gave the words, โ€™Heads off!โ€™; a cloak that made the owner invisible, or gave him any form he pleased; and a pair of boots that carried the wearer wherever he wished. Heinel said they must first let him try these wonderful things, then he might know how to set a value upon them. Then they gave him the cloak, and he wished himself a fly, and in a moment he was a fly. โ€™The cloak is very well,โ€™ said he: โ€™now give me the sword.โ€™ โ€™No,โ€™ said they; โ€™not unless you undertake not to say, โ€œHeads off!โ€ for if you do we are all dead men.โ€™ So they gave it him, charging him to try it on a tree. He next asked for the boots also; and the moment he had all three in his power, he wished himself at the Golden Mountain; and there he was at once. So the giants were left behind with no goods to share or quarrel about. + +As Heinel came near his castle he heard the sound of merry music; and the people around told him that his queen was about to marry another husband. Then he threw his cloak around him, and passed through the castle hall, and placed himself by the side of the queen, where no one saw him. 
But when anything to eat was put upon her plate, he took it away and ate it himself; and when a glass of wine was handed to her, he took it and drank it; and thus, though they kept on giving her meat and drink, her plate and cup were always empty. + +Upon this, fear and remorse came over her, and she went into her chamber alone, and sat there weeping; and he followed her there. โ€™Alas!โ€™ said she to herself, โ€™was I not once set free? Why then does this enchantment still seem to bind me?โ€™ + +โ€™False and fickle one!โ€™ said he. โ€™One indeed came who set thee free, and he is now near thee again; but how have you used him? Ought he to have had such treatment from thee?โ€™ Then he went out and sent away the company, and said the wedding was at an end, for that he was come back to the kingdom. But the princes, peers, and great men mocked at him. However, he would enter into no parley with them, but only asked them if they would go in peace or not. Then they turned upon him and tried to seize him; but he drew his sword. โ€™Heads Off!โ€™ cried he; and with the word the traitorsโ€™ heads fell before him, and Heinel was once more king of the Golden Mountain. \ No newline at end of file diff --git a/dotnet/samples/LearnResources/Resources/Grimms-The-Water-of-Life.txt b/dotnet/samples/LearnResources/Resources/Grimms-The-Water-of-Life.txt new file mode 100644 index 000000000000..3a5487beb5cc --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/Grimms-The-Water-of-Life.txt @@ -0,0 +1,44 @@ +The Water of Life +By the Grimm Brothers + +Long before you or I were born, there reigned, in a country a great way off, a king who had three sons. This king once fell very illโ€“so ill that nobody thought he could live. His sons were very much grieved at their fatherโ€™s sickness; and as they were walking together very mournfully in the garden of the palace, a little old man met them and asked what was the matter. 
They told him that their father was very ill, and that they were afraid nothing could save him. ’I know what would,’ said the little old man; ’it is the Water of Life. If he could have a draught of it he would be well again; but it is very hard to get.’ Then the eldest son said, ’I will soon find it’: and he went to the sick king, and begged that he might go in search of the Water of Life, as it was the only thing that could save him. ’No,’ said the king. ’I had rather die than place you in such great danger as you must meet with in your journey.’ But he begged so hard that the king let him go; and the prince thought to himself, ’If I bring my father this water, he will make me sole heir to his kingdom.’ + +Then he set out: and when he had gone on his way some time he came to a deep valley, overhung with rocks and woods; and as he looked around, he saw standing above him on one of the rocks a little ugly dwarf, with a sugarloaf cap and a scarlet cloak; and the dwarf called to him and said, ’Prince, whither so fast?’ ’What is that to thee, you ugly imp?’ said the prince haughtily, and rode on. + +But the dwarf was enraged at his behaviour, and laid a fairy spell of ill-luck upon him; so that as he rode on the mountain pass became narrower and narrower, and at last the way was so straitened that he could not go to step forward: and when he thought to have turned his horse round and go back the way he came, he heard a loud laugh ringing round him, and found that the path was closed behind him, so that he was shut in all round. He next tried to get off his horse and make his way on foot, but again the laugh rang in his ears, and he found himself unable to move a step, and thus he was forced to abide spellbound.
+ +Meantime the old king was lingering on in daily hope of his son’s return, till at last the second son said, ’Father, I will go in search of the Water of Life.’ For he thought to himself, ’My brother is surely dead, and the kingdom will fall to me if I find the water.’ The king was at first very unwilling to let him go, but at last yielded to his wish. So he set out and followed the same road which his brother had done, and met with the same elf, who stopped him at the same spot in the mountains, saying, as before, ’Prince, prince, whither so fast?’ ’Mind your own affairs, busybody!’ said the prince scornfully, and rode on. + +But the dwarf put the same spell upon him as he put on his elder brother, and he, too, was at last obliged to take up his abode in the heart of the mountains. Thus it is with proud silly people, who think themselves above everyone else, and are too proud to ask or take advice. + +When the second prince had thus been gone a long time, the youngest son said he would go and search for the Water of Life, and trusted he should soon be able to make his father well again. So he set out, and the dwarf met him too at the same spot in the valley, among the mountains, and said, ’Prince, whither so fast?’ And the prince said, ’I am going in search of the Water of Life, because my father is ill, and like to die: can you help me? Pray be kind, and aid me if you can!’ ’Do you know where it is to be found?’ asked the dwarf. ’No,’ said the prince, ’I do not. Pray tell me if you know.’ ’Then as you have spoken to me kindly, and are wise enough to seek for advice, I will tell you how and where to go.
The water you seek springs from a well in an enchanted castle; and, that you may be able to reach it in safety, I will give you an iron wand and two little loaves of bread; strike the iron door of the castle three times with the wand, and it will open: two hungry lions will be lying down inside gaping for their prey, but if you throw them the bread they will let you pass; then hasten on to the well, and take some of the Water of Life before the clock strikes twelve; for if you tarry longer the door will shut upon you for ever.โ€™ + +Then the prince thanked his little friend with the scarlet cloak for his friendly aid, and took the wand and the bread, and went travelling on and on, over sea and over land, till he came to his journeyโ€™s end, and found everything to be as the dwarf had told him. The door flew open at the third stroke of the wand, and when the lions were quieted he went on through the castle and came at length to a beautiful hall. Around it he saw several knights sitting in a trance; then he pulled off their rings and put them on his own fingers. In another room he saw on a table a sword and a loaf of bread, which he also took. Further on he came to a room where a beautiful young lady sat upon a couch; and she welcomed him joyfully, and said, if he would set her free from the spell that bound her, the kingdom should be his, if he would come back in a year and marry her. Then she told him that the well that held the Water of Life was in the palace gardens; and bade him make haste, and draw what he wanted before the clock struck twelve. + +He walked on; and as he walked through beautiful gardens he came to a delightful shady spot in which stood a couch; and he thought to himself, as he felt tired, that he would rest himself for a while, and gaze on the lovely scenes around him. So he laid himself down, and sleep fell upon him unawares, so that he did not wake up till the clock was striking a quarter to twelve. 
Then he sprang from the couch dreadfully frightened, ran to the well, filled a cup that was standing by him full of water, and hastened to get away in time. Just as he was going out of the iron door it struck twelve, and the door fell so quickly upon him that it snapped off a piece of his heel. + +When he found himself safe, he was overjoyed to think that he had got the Water of Life; and as he was going on his way homewards, he passed by the little dwarf, who, when he saw the sword and the loaf, said, โ€™You have made a noble prize; with the sword you can at a blow slay whole armies, and the bread will never fail you.โ€™ Then the prince thought to himself, โ€™I cannot go home to my father without my brothersโ€™; so he said, โ€™My dear friend, cannot you tell me where my two brothers are, who set out in search of the Water of Life before me, and never came back?โ€™ โ€™I have shut them up by a charm between two mountains,โ€™ said the dwarf, โ€™because they were proud and ill-behaved, and scorned to ask advice.โ€™ The prince begged so hard for his brothers, that the dwarf at last set them free, though unwillingly, saying, โ€™Beware of them, for they have bad hearts.โ€™ Their brother, however, was greatly rejoiced to see them, and told them all that had happened to him; how he had found the Water of Life, and had taken a cup full of it; and how he had set a beautiful princess free from a spell that bound her; and how she had engaged to wait a whole year, and then to marry him, and to give him the kingdom. + +Then they all three rode on together, and on their way home came to a country that was laid waste by war and a dreadful famine, so that it was feared all must die for want. But the prince gave the king of the land the bread, and all his kingdom ate of it. And he lent the king the wonderful sword, and he slew the enemyโ€™s army with it; and thus the kingdom was once more in peace and plenty. 
In the same manner he befriended two other countries through which they passed on their way. + +When they came to the sea, they got into a ship and during their voyage the two eldest said to themselves, โ€™Our brother has got the water which we could not find, therefore our father will forsake us and give him the kingdom, which is our rightโ€™; so they were full of envy and revenge, and agreed together how they could ruin him. Then they waited till he was fast asleep, and poured the Water of Life out of the cup, and took it for themselves, giving him bitter sea-water instead. + +When they came to their journeyโ€™s end, the youngest son brought his cup to the sick king, that he might drink and be healed. Scarcely, however, had he tasted the bitter sea-water when he became worse even than he was before; and then both the elder sons came in, and blamed the youngest for what they had done; and said that he wanted to poison their father, but that they had found the Water of Life, and had brought it with them. He no sooner began to drink of what they brought him, than he felt his sickness leave him, and was as strong and well as in his younger days. Then they went to their brother, and laughed at him, and said, โ€™Well, brother, you found the Water of Life, did you? You have had the trouble and we shall have the reward. Pray, with all your cleverness, why did not you manage to keep your eyes open? Next year one of us will take away your beautiful princess, if you do not take care. You had better say nothing about this to our father, for he does not believe a word you say; and if you tell tales, you shall lose your life into the bargain: but be quiet, and we will let you off.โ€™ + +The old king was still very angry with his youngest son, and thought that he really meant to have taken away his life; so he called his court together, and asked what should be done, and all agreed that he ought to be put to death. 
The prince knew nothing of what was going on, till one day, when the kingโ€™s chief huntsmen went a-hunting with him, and they were alone in the wood together, the huntsman looked so sorrowful that the prince said, โ€™My friend, what is the matter with you?โ€™ โ€™I cannot and dare not tell you,โ€™ said he. But the prince begged very hard, and said, โ€™Only tell me what it is, and do not think I shall be angry, for I will forgive you.โ€™ โ€™Alas!โ€™ said the huntsman; โ€™the king has ordered me to shoot you.โ€™ The prince started at this, and said, โ€™Let me live, and I will change dresses with you; you shall take my royal coat to show to my father, and do you give me your shabby one.โ€™ โ€™With all my heart,โ€™ said the huntsman; โ€™I am sure I shall be glad to save you, for I could not have shot you.โ€™ Then he took the princeโ€™s coat, and gave him the shabby one, and went away through the wood. + +Some time after, three grand embassies came to the old kingโ€™s court, with rich gifts of gold and precious stones for his youngest son; now all these were sent from the three kings to whom he had lent his sword and loaf of bread, in order to rid them of their enemy and feed their people. This touched the old kingโ€™s heart, and he thought his son might still be guiltless, and said to his court, โ€™O that my son were still alive! how it grieves me that I had him killed!โ€™ โ€™He is still alive,โ€™ said the huntsman; โ€™and I am glad that I had pity on him, but let him go in peace, and brought home his royal coat.โ€™ At this the king was overwhelmed with joy, and made it known throughout all his kingdom, that if his son would come back to his court he would forgive him. 
+ +Meanwhile the princess was eagerly waiting till her deliverer should come back; and had a road made leading up to her palace all of shining gold; and told her courtiers that whoever came on horseback, and rode straight up to the gate upon it, was her true lover; and that they must let him in: but whoever rode on one side of it, they must be sure was not the right one; and that they must send him away at once. + +The time soon came, when the eldest brother thought that he would make haste to go to the princess, and say that he was the one who had set her free, and that he should have her for his wife, and the kingdom with her. As he came before the palace and saw the golden road, he stopped to look at it, and he thought to himself, โ€™It is a pity to ride upon this beautiful roadโ€™; so he turned aside and rode on the right-hand side of it. But when he came to the gate, the guards, who had seen the road he took, said to him, he could not be what he said he was, and must go about his business. + +The second prince set out soon afterwards on the same errand; and when he came to the golden road, and his horse had set one foot upon it, he stopped to look at it, and thought it very beautiful, and said to himself, โ€™What a pity it is that anything should tread here!โ€™ Then he too turned aside and rode on the left side of it. But when he came to the gate the guards said he was not the true prince, and that he too must go away about his business; and away he went. + +Now when the full year was come round, the third brother left the forest in which he had lain hid for fear of his fatherโ€™s anger, and set out in search of his betrothed bride. So he journeyed on, thinking of her all the way, and rode so quickly that he did not even see what the road was made of, but went with his horse straight over it; and as he came to the gate it flew open, and the princess welcomed him with joy, and said he was her deliverer, and should now be her husband and lord of the kingdom. 
When the first joy at their meeting was over, the princess told him she had heard of his father having forgiven him, and of his wish to have him home again: so, before his wedding with the princess, he went to visit his father, taking her with him. Then he told him everything; how his brothers had cheated and robbed him, and yet that he had borne all those wrongs for the love of his father. And the old king was very angry, and wanted to punish his wicked sons; but they made their escape, and got into a ship and sailed away over the wide sea, and where they went to nobody knew and nobody cared. + +And now the old king gathered together his court, and asked all his kingdom to come and celebrate the wedding of his son and the princess. And young and old, noble and squire, gentle and simple, came at once on the summons; and among the rest came the friendly dwarf, with the sugarloaf hat, and a new scarlet cloak. + + And the wedding was held, and the merry bells rung. + And all the good people they danced and they sung, + And feasted and frolickโ€™d I canโ€™t tell how long. \ No newline at end of file diff --git a/dotnet/samples/LearnResources/Resources/Grimms-The-White-Snake.txt b/dotnet/samples/LearnResources/Resources/Grimms-The-White-Snake.txt new file mode 100644 index 000000000000..75ff737109f1 --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/Grimms-The-White-Snake.txt @@ -0,0 +1,28 @@ +The White Snake +By the Grimm Brothers + +A long time ago there lived a king who was famed for his wisdom through all the land. Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone. 
+ +This had gone on for a long time, when one day the servant, who took away the dish, was overcome with such curiosity that he could not help carrying the dish into his room. When he had carefully locked the door, he lifted up the cover, and saw a white snake lying on the dish. But when he saw it he could not deny himself the pleasure of tasting it, so he cut off a little bit and put it into his mouth. No sooner had it touched his tongue than he heard a strange whispering of little voices outside his window. He went and listened, and then noticed that it was the sparrows who were chattering together, and telling one another of all kinds of things which they had seen in the fields and woods. Eating the snake had given him power of understanding the language of animals. + +Now it so happened that on this very day the queen lost her most beautiful ring, and suspicion of having stolen it fell upon this trusty servant, who was allowed to go everywhere. The king ordered the man to be brought before him, and threatened with angry words that unless he could before the morrow point out the thief, he himself should be looked upon as guilty and executed. In vain he declared his innocence; he was dismissed with no better answer. + +In his trouble and fear he went down into the courtyard and took thought how to help himself out of his trouble. Now some ducks were sitting together quietly by a brook and taking their rest; and, whilst they were making their feathers smooth with their bills, they were having a confidential conversation together. The servant stood by and listened. 
They were telling one another of all the places where they had been waddling about all the morning, and what good food they had found; and one said in a pitiful tone: โ€™Something lies heavy on my stomach; as I was eating in haste I swallowed a ring which lay under the queenโ€™s window.โ€™ The servant at once seized her by the neck, carried her to the kitchen, and said to the cook: โ€™Here is a fine duck; pray, kill her.โ€™ โ€™Yes,โ€™ said the cook, and weighed her in his hand; โ€™she has spared no trouble to fatten herself, and has been waiting to be roasted long enough.โ€™ So he cut off her head, and as she was being dressed for the spit, the queenโ€™s ring was found inside her. + +The servant could now easily prove his innocence; and the king, to make amends for the wrong, allowed him to ask a favour, and promised him the best place in the court that he could wish for. The servant refused everything, and only asked for a horse and some money for travelling, as he had a mind to see the world and go about a little. When his request was granted he set out on his way, and one day came to a pond, where he saw three fishes caught in the reeds and gasping for water. Now, though it is said that fishes are dumb, he heard them lamenting that they must perish so miserably, and, as he had a kind heart, he got off his horse and put the three prisoners back into the water. They leapt with delight, put out their heads, and cried to him: โ€™We will remember you and repay you for saving us!โ€™ + +He rode on, and after a while it seemed to him that he heard a voice in the sand at his feet. He listened, and heard an ant-king complain: โ€™Why cannot folks, with their clumsy beasts, keep off our bodies? 
That stupid horse, with his heavy hoofs, has been treading down my people without mercy!โ€™ So he turned on to a side path and the ant-king cried out to him: โ€™We will remember youโ€“one good turn deserves another!โ€™ + +The path led him into a wood, and there he saw two old ravens standing by their nest, and throwing out their young ones. โ€™Out with you, you idle, good-for-nothing creatures!โ€™ cried they; โ€™we cannot find food for you any longer; you are big enough, and can provide for yourselves.โ€™ But the poor young ravens lay upon the ground, flapping their wings, and crying: โ€™Oh, what helpless chicks we are! We must shift for ourselves, and yet we cannot fly! What can we do, but lie here and starve?โ€™ So the good young fellow alighted and killed his horse with his sword, and gave it to them for food. Then they came hopping up to it, satisfied their hunger, and cried: โ€™We will remember youโ€“one good turn deserves another!โ€™ + +And now he had to use his own legs, and when he had walked a long way, he came to a large city. There was a great noise and crowd in the streets, and a man rode up on horseback, crying aloud: โ€™The kingโ€™s daughter wants a husband; but whoever seeks her hand must perform a hard task, and if he does not succeed he will forfeit his life.โ€™ Many had already made the attempt, but in vain; nevertheless when the youth saw the kingโ€™s daughter he was so overcome by her great beauty that he forgot all danger, went before the king, and declared himself a suitor. + +So he was led out to the sea, and a gold ring was thrown into it, before his eyes; then the king ordered him to fetch this ring up from the bottom of the sea, and added: โ€™If you come up again without it you will be thrown in again and again until you perish amid the waves.โ€™ All the people grieved for the handsome youth; then they went away, leaving him alone by the sea. 
+ +He stood on the shore and considered what he should do, when suddenly he saw three fishes come swimming towards him, and they were the very fishes whose lives he had saved. The one in the middle held a mussel in its mouth, which it laid on the shore at the youthโ€™s feet, and when he had taken it up and opened it, there lay the gold ring in the shell. Full of joy he took it to the king and expected that he would grant him the promised reward. + +But when the proud princess perceived that he was not her equal in birth, she scorned him, and required him first to perform another task. She went down into the garden and strewed with her own hands ten sacksful of millet-seed on the grass; then she said: โ€™Tomorrow morning before sunrise these must be picked up, and not a single grain be wanting.โ€™ + +The youth sat down in the garden and considered how it might be possible to perform this task, but he could think of nothing, and there he sat sorrowfully awaiting the break of day, when he should be led to death. But as soon as the first rays of the sun shone into the garden he saw all the ten sacks standing side by side, quite full, and not a single grain was missing. The ant-king had come in the night with thousands and thousands of ants, and the grateful creatures had by great industry picked up all the millet-seed and gathered them into the sacks. + +Presently the kingโ€™s daughter herself came down into the garden, and was amazed to see that the young man had done the task she had given him. But she could not yet conquer her proud heart, and said: โ€™Although he has performed both the tasks, he shall not be my husband until he has brought me an apple from the Tree of Life.โ€™ The youth did not know where the Tree of Life stood, but he set out, and would have gone on for ever, as long as his legs would carry him, though he had no hope of finding it. After he had wandered through three kingdoms, he came one evening to a wood, and lay down under a tree to sleep. 
But he heard a rustling in the branches, and a golden apple fell into his hand. At the same time three ravens flew down to him, perched themselves upon his knee, and said: โ€™We are the three young ravens whom you saved from starving; when we had grown big, and heard that you were seeking the Golden Apple, we flew over the sea to the end of the world, where the Tree of Life stands, and have brought you the apple.โ€™ The youth, full of joy, set out homewards, and took the Golden Apple to the kingโ€™s beautiful daughter, who had now no more excuses left to make. They cut the Apple of Life in two and ate it together; and then her heart became full of love for him, and they lived in undisturbed happiness to a great age. \ No newline at end of file diff --git a/dotnet/samples/LearnResources/Resources/PopulationByAdmin1.csv b/dotnet/samples/LearnResources/Resources/PopulationByAdmin1.csv new file mode 100644 index 000000000000..7fc6970985da --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/PopulationByAdmin1.csv @@ -0,0 +1,744 @@ +UID,iso2,iso3,code3,Province_State,Country_Region,Lat,Long,Combined_Key,Population +5601,BE,BEL,56,Antwerp,Belgium,51.2195,4.4024,"Antwerp, Belgium",1869730 +5602,BE,BEL,56,Brussels,Belgium,50.8503,4.3517,"Brussels, Belgium",1218255 +5603,BE,BEL,56,East Flanders,Belgium,51.0362,3.7373,"East Flanders, Belgium",1525255 +5604,BE,BEL,56,Flemish Brabant,Belgium,50.9167,4.5833,"Flemish Brabant, Belgium",1155843 +5605,BE,BEL,56,Hainaut,Belgium,50.5257,4.0621,"Hainaut, Belgium",1346840 +5606,BE,BEL,56,Liege,Belgium,50.4496,5.8492,"Liege, Belgium",1109800 +5607,BE,BEL,56,Limburg,Belgium,50.9739,5.342,"Limburg, Belgium",877370 +5608,BE,BEL,56,Luxembourg,Belgium,50.0547,5.4677,"Luxembourg, Belgium",286752 +5609,BE,BEL,56,Namur,Belgium,50.331,4.8221,"Namur, Belgium",495832 +5611,BE,BEL,56,Walloon Brabant,Belgium,50.4,4.35,"Walloon Brabant, Belgium",406019 +5612,BE,BEL,56,West Flanders,Belgium,51.0536,3.1458,"West Flanders, Belgium",1200945 
+7601,BR,BRA,76,Acre,Brazil,-9.0238,-70.812,"Acre, Brazil",881935 +7602,BR,BRA,76,Alagoas,Brazil,-9.5713,-36.782,"Alagoas, Brazil",3337357 +7603,BR,BRA,76,Amapa,Brazil,0.902,-52.003,"Amapa, Brazil",845731 +7604,BR,BRA,76,Amazonas,Brazil,-3.4168,-65.8561,"Amazonas, Brazil",4144597 +7605,BR,BRA,76,Bahia,Brazil,-12.5797,-41.7007,"Bahia, Brazil",14873064 +7606,BR,BRA,76,Ceara,Brazil,-5.4984,-39.3206,"Ceara, Brazil",9132078 +7607,BR,BRA,76,Distrito Federal,Brazil,-15.7998,-47.8645,"Distrito Federal, Brazil",3015268 +7608,BR,BRA,76,Espirito Santo,Brazil,-19.1834,-40.3089,"Espirito Santo, Brazil",4018650 +7609,BR,BRA,76,Goias,Brazil,-15.827,-49.8362,"Goias, Brazil",7018354 +7610,BR,BRA,76,Maranhao,Brazil,-4.9609,-45.2744,"Maranhao, Brazil",7075181 +7611,BR,BRA,76,Mato Grosso,Brazil,-12.6819,-56.9211,"Mato Grosso, Brazil",3484466 +7612,BR,BRA,76,Mato Grosso do Sul,Brazil,-20.7722,-54.7852,"Mato Grosso do Sul, Brazil",2778986 +7613,BR,BRA,76,Minas Gerais,Brazil,-18.5122,-44.555,"Minas Gerais, Brazil",21168791 +7614,BR,BRA,76,Para,Brazil,-1.9981,-54.9306,"Para, Brazil",8602865 +7615,BR,BRA,76,Paraiba,Brazil,-7.24,-36.782,"Paraiba, Brazil",4018127 +7616,BR,BRA,76,Parana,Brazil,-25.2521,-52.0215,"Parana, Brazil",11433957 +7617,BR,BRA,76,Pernambuco,Brazil,-8.8137,-36.9541,"Pernambuco, Brazil",9557071 +7618,BR,BRA,76,Piaui,Brazil,-7.7183,-42.7289,"Piaui, Brazil",3273227 +7619,BR,BRA,76,Rio de Janeiro,Brazil,-22.9068,-43.1729,"Rio de Janeiro, Brazil",17264943 +7620,BR,BRA,76,Rio Grande do Norte,Brazil,-5.4026,-36.9541,"Rio Grande do Norte, Brazil",3506853 +7621,BR,BRA,76,Rio Grande do Sul,Brazil,-30.0346,-51.2177,"Rio Grande do Sul, Brazil",11377239 +7622,BR,BRA,76,Rondonia,Brazil,-11.5057,-63.5806,"Rondonia, Brazil",1777225 +7623,BR,BRA,76,Roraima,Brazil,-2.7376,-62.0751,"Roraima, Brazil",605761 +7624,BR,BRA,76,Santa Catarina,Brazil,-27.2423,-50.2189,"Santa Catarina, Brazil",7164788 +7625,BR,BRA,76,Sao Paulo,Brazil,-23.5505,-46.6333,"Sao Paulo, Brazil",45919049 
+7626,BR,BRA,76,Sergipe,Brazil,-10.5741,-37.3857,"Sergipe, Brazil",2298696 +7627,BR,BRA,76,Tocantins,Brazil,-10.1753,-48.2982,"Tocantins, Brazil",1572866 +15201,CL,CHL,152,Antofagasta,Chile,-23.6509,-70.3975,"Antofagasta, Chile",607534 +15202,CL,CHL,152,Araucania,Chile,-38.9489,-72.3311,"Araucania, Chile",957224 +15203,CL,CHL,152,Arica y Parinacota,Chile,-18.594,-69.4785,"Arica y Parinacota, Chile",226068 +15204,CL,CHL,152,Atacama,Chile,-27.5661,-70.0503,"Atacama, Chile",288944 +15205,CL,CHL,152,Aysen,Chile,-45.9864,-73.7669,"Aysen, Chile",103158 +15206,CL,CHL,152,Biobio,Chile,-37.4464,-72.1416,"Biobio, Chile",1556805 +15207,CL,CHL,152,Coquimbo,Chile,-29.959,-71.3389,"Coquimbo, Chile",757586 +15208,CL,CHL,152,Los Lagos,Chile,-41.9198,-72.1416,"Los Lagos, Chile",828708 +15209,CL,CHL,152,Los Rios,Chile,-40.231,-72.3311,"Los Rios, Chile",384837 +15210,CL,CHL,152,Magallanes,Chile,-52.368,-70.9863,"Magallanes, Chile",166533 +15211,CL,CHL,152,Maule,Chile,-35.5183,-71.6885,"Maule, Chile",1044950 +15212,CL,CHL,152,Metropolitana,Chile,-33.4376,-70.6505,"Metropolitana, Chile",7112808 +15213,CL,CHL,152,Nuble,Chile,-36.7226,-71.7622,"Nuble, Chile",480609 +15214,CL,CHL,152,OHiggins,Chile,-34.5755,-71.0022,"OHiggins, Chile",914555 +15215,CL,CHL,152,Tarapaca,Chile,-19.9232,-69.5132,"Tarapaca, Chile",330558 +15216,CL,CHL,152,Valparaiso,Chile,-33.0472,-71.6127,"Valparaiso, Chile",1815902 +17001,CO,COL,170,Amazonas,Colombia,-1.4429,-71.5724,"Amazonas, Colombia",76589 +17002,CO,COL,170,Antioquia,Colombia,7.1986,-75.3412,"Antioquia, Colombia",6407102 +17003,CO,COL,170,Arauca,Colombia,7.0762,-70.7105,"Arauca, Colombia",262174 +17004,CO,COL,170,Atlantico,Colombia,10.6966,-74.8741,"Atlantico, Colombia",2535517 +17005,CO,COL,170,Bolivar,Colombia,8.6704,-74.03,"Bolivar, Colombia",2070110 +17006,CO,COL,170,Boyaca,Colombia,5.4545,-73.362,"Boyaca, Colombia",1217376 +17007,CO,COL,170,Caldas,Colombia,5.2983,-75.2479,"Caldas, Colombia",998255 +17008,CO,COL,170,Capital 
District,Colombia,4.711,-74.0721,"Capital District, Colombia",7412566 +17009,CO,COL,170,Caqueta,Colombia,0.8699,-73.8419,"Caqueta, Colombia",401489 +17010,CO,COL,170,Casanare,Colombia,5.7589,-71.5724,"Casanare, Colombia",420504 +17011,CO,COL,170,Cauca,Colombia,2.705,-76.826,"Cauca, Colombia",1464488 +17012,CO,COL,170,Cesar,Colombia,9.3373,-73.6536,"Cesar, Colombia",1200574 +17013,CO,COL,170,Choco,Colombia,5.2528,-76.826,"Choco, Colombia",534826 +17014,CO,COL,170,Cordoba,Colombia,8.0493,-75.574,"Cordoba, Colombia",1784783 +17015,CO,COL,170,Cundinamarca,Colombia,5.026,-74.03,"Cundinamarca, Colombia",2919060 +17016,CO,COL,170,Guainia,Colombia,2.5854,-68.5247,"Guainia, Colombia",48114 +17017,CO,COL,170,Guaviare,Colombia,1.0654,-73.2603,"Guaviare, Colombia",82767 +17018,CO,COL,170,Huila,Colombia,2.5359,-75.5277,"Huila, Colombia",1100386 +17019,CO,COL,170,La Guajira,Colombia,11.3548,-72.5205,"La Guajira, Colombia",880560 +17020,CO,COL,170,Magdalena,Colombia,10.4113,-74.4057,"Magdalena, Colombia",1341746 +17021,CO,COL,170,Meta,Colombia,3.272,-73.0877,"Meta, Colombia",1039722 +17022,CO,COL,170,Narino,Colombia,1.2892,-77.3579,"Narino, Colombia",1630592 +17023,CO,COL,170,Norte de Santander,Colombia,7.9463,-72.8988,"Norte de Santander, Colombia",1491689 +17024,CO,COL,170,Putumayo,Colombia,0.436,-75.5277,"Putumayo, Colombia",348182 +17025,CO,COL,170,Quindio,Colombia,4.461,-75.6674,"Quindio, Colombia",539904 +17026,CO,COL,170,Risaralda,Colombia,5.3158,-75.9928,"Risaralda, Colombia",943401 +17027,CO,COL,170,San Andres y Providencia,Colombia,12.5567,-81.7185,"San Andres y Providencia, Colombia",61280 +17028,CO,COL,170,Santander,Colombia,6.6437,-73.6536,"Santander, Colombia",2184837 +17029,CO,COL,170,Sucre,Colombia,8.814,-74.7233,"Sucre, Colombia",904863 +17030,CO,COL,170,Tolima,Colombia,4.0925,-75.1545,"Tolima, Colombia",1330187 +17031,CO,COL,170,Valle del Cauca,Colombia,3.8009,-76.6413,"Valle del Cauca, Colombia",4475886 +17032,CO,COL,170,Vaupes,Colombia,0.8554,-70.812,"Vaupes, 
Colombia",40797 +17033,CO,COL,170,Vichada,Colombia,4.4234,-69.2878,"Vichada, Colombia",107808 +234,FO,FRO,234,Faroe Islands,Denmark,61.8926,-6.9118,"Faroe Islands, Denmark",48865 +304,GL,GRL,304,Greenland,Denmark,71.7069,-42.6043,"Greenland, Denmark",56772 +254,GF,GUF,254,French Guiana,France,4,-53,"French Guiana, France",298682 +258,PF,PYF,258,French Polynesia,France,-17.6797,-149.4068,"French Polynesia, France",280904 +312,GP,GLP,312,Guadeloupe,France,16.265,-61.551,"Guadeloupe, France",400127 +474,MQ,MTQ,474,Martinique,France,14.6415,-61.0242,"Martinique, France",375265 +175,YT,MYT,175,Mayotte,France,-12.8275,45.166244,"Mayotte, France",272813 +540,NC,NCL,540,New Caledonia,France,-20.904305,165.618042,"New Caledonia, France",285491 +638,RE,REU,638,Reunion,France,-21.1151,55.5364,"Reunion, France",895308 +652,BL,BLM,652,Saint Barthelemy,France,17.9,-62.8333,"Saint Barthelemy, France",9885 +666,PM,SPM,666,Saint Pierre and Miquelon,France,46.8852,-56.3159,"Saint Pierre and Miquelon, France",5795 +663,MF,MAF,663,St Martin,France,18.0708,-63.0501,"St Martin, France",38659 +876,WF,WLF,876,Wallis and Futuna,France,-14.2938,-178.1165,"Wallis and Futuna, France",15289 +27601,DE,DEU,276,Baden-Wurttemberg,Germany,48.6616,9.3501,"Baden-Wurttemberg, Germany",11103043 +27602,DE,DEU,276,Bayern,Germany,48.7904,11.4979,"Bayern, Germany",13140183 +27603,DE,DEU,276,Berlin,Germany,52.52,13.405,"Berlin, Germany",3664088 +27604,DE,DEU,276,Brandenburg,Germany,52.4125,12.5316,"Brandenburg, Germany",2531071 +27605,DE,DEU,276,Bremen,Germany,53.0793,8.8017,"Bremen, Germany",680130 +27606,DE,DEU,276,Hamburg,Germany,53.5511,9.9937,"Hamburg, Germany",1852478 +27607,DE,DEU,276,Hessen,Germany,50.6521,9.1624,"Hessen, Germany",6293154 +27608,DE,DEU,276,Mecklenburg-Vorpommern,Germany,53.6127,12.4296,"Mecklenburg-Vorpommern, Germany",1610774 +27609,DE,DEU,276,Niedersachsen,Germany,52.6367,9.8451,"Niedersachsen, Germany",8003421 
+27610,DE,DEU,276,Nordrhein-Westfalen,Germany,51.4332,7.6616,"Nordrhein-Westfalen, Germany",17925570 +27611,DE,DEU,276,Rheinland-Pfalz,Germany,50.1183,7.309,"Rheinland-Pfalz, Germany",4098391 +27612,DE,DEU,276,Saarland,Germany,49.3964,7.023,"Saarland, Germany",983991 +27613,DE,DEU,276,Sachsen,Germany,51.1045,13.2017,"Sachsen, Germany",4056941 +27614,DE,DEU,276,Sachsen-Anhalt,Germany,51.9503,11.6923,"Sachsen-Anhalt, Germany",2180684 +27615,DE,DEU,276,Schleswig-Holstein,Germany,54.2194,9.6961,"Schleswig-Holstein, Germany",2910875 +27616,DE,DEU,276,Thuringen,Germany,51.011,10.8453,"Thuringen, Germany",2120237 +35601,IN,IND,356,Andaman and Nicobar Islands,India,11.225999,92.968178,"Andaman and Nicobar Islands, India",417036 +35602,IN,IND,356,Andhra Pradesh,India,15.9129,79.74,"Andhra Pradesh, India",53903393 +35603,IN,IND,356,Arunachal Pradesh,India,27.768456,96.384277,"Arunachal Pradesh, India",1570458 +35604,IN,IND,356,Assam,India,26.357149,92.830441,"Assam, India",35607039 +35605,IN,IND,356,Bihar,India,25.679658,85.60484,"Bihar, India",124799926 +35606,IN,IND,356,Chandigarh,India,30.733839,76.768278,"Chandigarh, India",1158473 +35607,IN,IND,356,Chhattisgarh,India,21.264705,82.035366,"Chhattisgarh, India",29436231 +35608,IN,IND,356,Dadra and Nagar Haveli and Daman and Diu,India,20.194742,73.080901,"Dadra and Nagar Haveli and Daman and Diu, India",615724 +35609,IN,IND,356,Delhi,India,28.646519,77.10898,"Delhi, India",18710922 +35610,IN,IND,356,Goa,India,15.359682,74.057396,"Goa, India",1586250 +35611,IN,IND,356,Gujarat,India,22.694884,71.590923,"Gujarat, India",63872399 +35612,IN,IND,356,Haryana,India,29.20004,76.332824,"Haryana, India",28204692 +35613,IN,IND,356,Himachal Pradesh,India,31.927213,77.233081,"Himachal Pradesh, India",7451955 +35614,IN,IND,356,Jammu and Kashmir,India,33.75943,76.612638,"Jammu and Kashmir, India",13606320 +35615,IN,IND,356,Jharkhand,India,23.654536,85.557631,"Jharkhand, India",38593948 
+35616,IN,IND,356,Karnataka,India,14.70518,76.166436,"Karnataka, India",67562686 +35617,IN,IND,356,Kerala,India,10.450898,76.405749,"Kerala, India",35699443 +35618,IN,IND,356,Ladakh,India,34.1526,77.5771,"Ladakh, India",274289 +35619,IN,IND,356,Madhya Pradesh,India,23.541513,78.289633,"Madhya Pradesh, India",85358965 +35620,IN,IND,356,Maharashtra,India,19.449759,76.108221,"Maharashtra, India",123144223 +35621,IN,IND,356,Manipur,India,24.738975,93.882541,"Manipur, India",3091545 +35622,IN,IND,356,Meghalaya,India,25.536934,91.278882,"Meghalaya, India",3366710 +35623,IN,IND,356,Mizoram,India,23.309381,92.83822,"Mizoram, India",1239244 +35624,IN,IND,356,Nagaland,India,26.06702,94.470302,"Nagaland, India",2249695 +35625,IN,IND,356,Odisha,India,20.505428,84.418059,"Odisha, India",46356334 +35626,IN,IND,356,Puducherry,India,11.882658,78.86498,"Puducherry, India",1413542 +35627,IN,IND,356,Punjab,India,30.841465,75.40879,"Punjab, India",30141373 +35628,IN,IND,356,Rajasthan,India,26.583423,73.847973,"Rajasthan, India",81032689 +35629,IN,IND,356,Sikkim,India,27.571671,88.472712,"Sikkim, India",690251 +35630,IN,IND,356,Tamil Nadu,India,11.006091,78.400624,"Tamil Nadu, India",77841267 +35631,IN,IND,356,Telangana,India,18.1124,79.0193,"Telangana, India",39362732 +35632,IN,IND,356,Tripura,India,23.746783,91.743565,"Tripura, India",4169794 +35633,IN,IND,356,Uttar Pradesh,India,26.925425,80.560982,"Uttar Pradesh, India",237882725 +35634,IN,IND,356,Uttarakhand,India,30.156447,79.197608,"Uttarakhand, India",11250858 +35635,IN,IND,356,West Bengal,India,23.814082,87.979803,"West Bengal, India",99609303 +35637,IN,IND,356,Lakshadweep,India,13.6999972,72.1833326,"Lakshadweep, India",64429 +38013,IT,ITA,380,Abruzzo,Italy,42.35122196,13.39843823,"Abruzzo, Italy",1311580 +38017,IT,ITA,380,Basilicata,Italy,40.63947052,15.80514834,"Basilicata, Italy",562869 +38018,IT,ITA,380,Calabria,Italy,38.90597598,16.59440194,"Calabria, Italy",1947131 
+38015,IT,ITA,380,Campania,Italy,40.83956555,14.25084984,"Campania, Italy",5801692 +38008,IT,ITA,380,Emilia-Romagna,Italy,44.49436681,11.3417208,"Emilia-Romagna, Italy",4459477 +38006,IT,ITA,380,Friuli Venezia Giulia,Italy,45.6494354,13.76813649,"Friuli Venezia Giulia, Italy",1215220 +38012,IT,ITA,380,Lazio,Italy,41.89277044,12.48366722,"Lazio, Italy",5879082 +38007,IT,ITA,380,Liguria,Italy,44.41149315,8.9326992,"Liguria, Italy",1550640 +38003,IT,ITA,380,Lombardia,Italy,45.46679409,9.190347404,"Lombardia, Italy",10060574 +38011,IT,ITA,380,Marche,Italy,43.61675973,13.5188753,"Marche, Italy",1525271 +38014,IT,ITA,380,Molise,Italy,41.55774754,14.65916051,"Molise, Italy",305617 +38041,IT,ITA,380,P.A. Bolzano,Italy,46.49933453,11.35662422,"P.A. Bolzano, Italy",532318 +38042,IT,ITA,380,P.A. Trento,Italy,46.06893511,11.12123097,"P.A. Trento, Italy",541418 +38001,IT,ITA,380,Piemonte,Italy,45.0732745,7.680687483,"Piemonte, Italy",4356406 +38016,IT,ITA,380,Puglia,Italy,41.12559576,16.86736689,"Puglia, Italy",4029053 +38020,IT,ITA,380,Sardegna,Italy,39.21531192,9.110616306,"Sardegna, Italy",1639591 +38019,IT,ITA,380,Sicilia,Italy,38.11569725,13.3623567,"Sicilia, Italy",4999891 +38009,IT,ITA,380,Toscana,Italy,43.76923077,11.25588885,"Toscana, Italy",3729641 +38010,IT,ITA,380,Umbria,Italy,43.10675841,12.38824698,"Umbria, Italy",882015 +38002,IT,ITA,380,Valle d'Aosta,Italy,45.73750286,7.320149366,"Valle d'Aosta, Italy",125666 +38005,IT,ITA,380,Veneto,Italy,45.43490485,12.33845213,"Veneto, Italy",4905854 +39201,JP,JPN,392,Aichi,Japan,35.035551,137.211621,"Aichi, Japan",7552239 +39202,JP,JPN,392,Akita,Japan,39.748679,140.408228,"Akita, Japan",966490 +39203,JP,JPN,392,Aomori,Japan,40.781541,140.828896,"Aomori, Japan",1246371 +39204,JP,JPN,392,Chiba,Japan,35.510141,140.198917,"Chiba, Japan",6259382 +39205,JP,JPN,392,Ehime,Japan,33.624835,132.856842,"Ehime, Japan",1339215 +39206,JP,JPN,392,Fukui,Japan,35.846614,136.224654,"Fukui, Japan",767937 
+39207,JP,JPN,392,Fukuoka,Japan,33.526032,130.666949,"Fukuoka, Japan",5103679 +39208,JP,JPN,392,Fukushima,Japan,37.378867,140.223295,"Fukushima, Japan",1845519 +39209,JP,JPN,392,Gifu,Japan,35.778671,137.055925,"Gifu, Japan",1986587 +39210,JP,JPN,392,Gunma,Japan,36.504479,138.985605,"Gunma, Japan",1942456 +39211,JP,JPN,392,Hiroshima,Japan,34.605309,132.788719,"Hiroshima, Japan",2804177 +39212,JP,JPN,392,Hokkaido,Japan,43.385711,142.552318,"Hokkaido, Japan",5250049 +39213,JP,JPN,392,Hyogo,Japan,35.039913,134.828057,"Hyogo, Japan",5466190 +39214,JP,JPN,392,Ibaraki,Japan,36.303588,140.319591,"Ibaraki, Japan",2860307 +39215,JP,JPN,392,Ishikawa,Japan,36.769464,136.771027,"Ishikawa, Japan",1137649 +39216,JP,JPN,392,Iwate,Japan,39.593287,141.361777,"Iwate, Japan",1226816 +39217,JP,JPN,392,Kagawa,Japan,34.217292,133.969047,"Kagawa, Japan",956347 +39218,JP,JPN,392,Kagoshima,Japan,31.009484,130.430665,"Kagoshima, Japan",1602273 +39219,JP,JPN,392,Kanagawa,Japan,35.415312,139.338983,"Kanagawa, Japan",9198268 +39220,JP,JPN,392,Kochi,Japan,33.422519,133.367307,"Kochi, Japan",698029 +39221,JP,JPN,392,Kumamoto,Japan,32.608154,130.745231,"Kumamoto, Japan",1747567 +39222,JP,JPN,392,Kyoto,Japan,35.253815,135.443341,"Kyoto, Japan",2582957 +39223,JP,JPN,392,Mie,Japan,34.508018,136.376013,"Mie, Japan",1780882 +39224,JP,JPN,392,Miyagi,Japan,38.446859,140.927086,"Miyagi, Japan",2306365 +39225,JP,JPN,392,Miyazaki,Japan,32.193204,131.299374,"Miyazaki, Japan",1073301 +39226,JP,JPN,392,Nagano,Japan,36.132134,138.045528,"Nagano, Japan",2048790 +39227,JP,JPN,392,Nagasaki,Japan,33.235712,129.608033,"Nagasaki, Japan",1326524 +39228,JP,JPN,392,Nara,Japan,34.317451,135.871644,"Nara, Japan",1330123 +39229,JP,JPN,392,Niigata,Japan,37.521819,138.918647,"Niigata, Japan",2223106 +39230,JP,JPN,392,Oita,Japan,33.200697,131.43324,"Oita, Japan",1135434 +39231,JP,JPN,392,Okayama,Japan,34.89246,133.826252,"Okayama, Japan",1889586 +39232,JP,JPN,392,Okinawa,Japan,25.768923,126.668016,"Okinawa, Japan",1453168 
+39233,JP,JPN,392,Osaka,Japan,34.620965,135.507481,"Osaka, Japan",8809363 +39234,JP,JPN,392,Saga,Japan,33.286977,130.115738,"Saga, Japan",814711 +39235,JP,JPN,392,Saitama,Japan,35.997101,139.347635,"Saitama, Japan",7349693 +39236,JP,JPN,392,Shiga,Japan,35.215827,136.138064,"Shiga, Japan",1413943 +39237,JP,JPN,392,Shimane,Japan,35.07076,132.554064,"Shimane, Japan",674346 +39238,JP,JPN,392,Shizuoka,Japan,34.916975,138.407784,"Shizuoka, Japan",3643528 +39239,JP,JPN,392,Tochigi,Japan,36.689912,139.819213,"Tochigi, Japan",1933990 +39240,JP,JPN,392,Tokushima,Japan,33.919178,134.242091,"Tokushima, Japan",727977 +39241,JP,JPN,392,Tokyo,Japan,35.711343,139.446921,"Tokyo, Japan",13920663 +39242,JP,JPN,392,Tottori,Japan,35.359069,133.863619,"Tottori, Japan",555558 +39243,JP,JPN,392,Toyama,Japan,36.637464,137.269346,"Toyama, Japan",1043502 +39244,JP,JPN,392,Wakayama,Japan,33.911879,135.505446,"Wakayama, Japan",924933 +39245,JP,JPN,392,Yamagata,Japan,38.448396,140.102154,"Yamagata, Japan",1077666 +39246,JP,JPN,392,Yamaguchi,Japan,34.20119,131.573293,"Yamaguchi, Japan",1358336 +39247,JP,JPN,392,Yamanashi,Japan,35.612364,138.611489,"Yamanashi, Japan",810956 +45801,MY,MYS,458,Johor,Malaysia,1.4854,103.7618,"Johor, Malaysia",3768200 +45802,MY,MYS,458,Kedah,Malaysia,6.1184,100.3685,"Kedah, Malaysia",2185900 +45803,MY,MYS,458,Kelantan,Malaysia,6.1254,102.2381,"Kelantan, Malaysia",1892200 +45804,MY,MYS,458,Melaka,Malaysia,2.1896,102.2501,"Melaka, Malaysia",932700 +45805,MY,MYS,458,Negeri Sembilan,Malaysia,2.7258,101.9424,"Negeri Sembilan, Malaysia",1132100 +45806,MY,MYS,458,Pahang,Malaysia,3.8126,103.3256,"Pahang, Malaysia",1677100 +45807,MY,MYS,458,Perak,Malaysia,4.5921,101.0901,"Perak, Malaysia",2514300 +45808,MY,MYS,458,Perlis,Malaysia,6.4449,100.2048,"Perlis, Malaysia",254600 +45809,MY,MYS,458,Pulau Pinang,Malaysia,5.4141,100.3288,"Pulau Pinang, Malaysia",1777600 +45810,MY,MYS,458,Sabah,Malaysia,5.9788,116.0753,"Sabah, Malaysia",3904700 
+45811,MY,MYS,458,Sarawak,Malaysia,1.5533,110.3592,"Sarawak, Malaysia",2818100 +45812,MY,MYS,458,Selangor,Malaysia,3.0738,101.5183,"Selangor, Malaysia",6541900 +45813,MY,MYS,458,Terengganu,Malaysia,5.3117,103.1324,"Terengganu, Malaysia",1250100 +45814,MY,MYS,458,W.P. Kuala Lumpur,Malaysia,3.139,101.6869,"W.P. Kuala Lumpur, Malaysia",1778400 +45815,MY,MYS,458,W.P. Labuan,Malaysia,5.2831,115.2308,"W.P. Labuan, Malaysia",99400 +45816,MY,MYS,458,W.P. Putrajaya,Malaysia,2.9264,101.6964,"W.P. Putrajaya, Malaysia",105400 +48401,MX,MEX,484,Aguascalientes,Mexico,21.8853,-102.2916,"Aguascalientes, Mexico",1434635 +48402,MX,MEX,484,Baja California,Mexico,30.8406,-115.2838,"Baja California, Mexico",3634868 +48403,MX,MEX,484,Baja California Sur,Mexico,26.0444,-111.6661,"Baja California Sur, Mexico",804708 +48404,MX,MEX,484,Campeche,Mexico,19.8301,-90.5349,"Campeche, Mexico",1000617 +48405,MX,MEX,484,Chiapas,Mexico,16.7569,-93.1292,"Chiapas, Mexico",5730367 +48406,MX,MEX,484,Chihuahua,Mexico,28.633,-106.0691,"Chihuahua, Mexico",3801487 +48407,MX,MEX,484,Ciudad de Mexico,Mexico,19.4326,-99.1332,"Ciudad de Mexico, Mexico",9018645 +48408,MX,MEX,484,Coahuila,Mexico,27.0587,-101.7068,"Coahuila, Mexico",3218720 +48409,MX,MEX,484,Colima,Mexico,19.1223,-104.0072,"Colima, Mexico",785153 +48410,MX,MEX,484,Durango,Mexico,24.5593,-104.6588,"Durango, Mexico",1868996 +48411,MX,MEX,484,Guanajuato,Mexico,21.019,-101.2574,"Guanajuato, Mexico",6228175 +48412,MX,MEX,484,Guerrero,Mexico,17.4392,-99.5451,"Guerrero, Mexico",3657048 +48413,MX,MEX,484,Hidalgo,Mexico,20.0911,-98.7624,"Hidalgo, Mexico",3086414 +48414,MX,MEX,484,Jalisco,Mexico,20.6595,-103.3494,"Jalisco, Mexico",8409693 +48415,MX,MEX,484,Mexico,Mexico,19.4969,-99.7233,"Mexico, Mexico",17427790 +48416,MX,MEX,484,Michoacan,Mexico,19.5665,-101.7068,"Michoacan, Mexico",4825401 +48417,MX,MEX,484,Morelos,Mexico,18.6813,-99.1013,"Morelos, Mexico",2044058 +48418,MX,MEX,484,Nayarit,Mexico,21.7514,-104.8455,"Nayarit, Mexico",1288571 
+48419,MX,MEX,484,Nuevo Leon,Mexico,25.5922,-99.9962,"Nuevo Leon, Mexico",5610153 +48420,MX,MEX,484,Oaxaca,Mexico,17.0732,-96.7266,"Oaxaca, Mexico",4143593 +48421,MX,MEX,484,Puebla,Mexico,19.0414,-98.2063,"Puebla, Mexico",6604451 +48422,MX,MEX,484,Queretaro,Mexico,20.5888,-100.3899,"Queretaro, Mexico",2279637 +48423,MX,MEX,484,Quintana Roo,Mexico,19.1817,-88.4791,"Quintana Roo, Mexico",1723259 +48424,MX,MEX,484,San Luis Potosi,Mexico,22.1565,-100.9855,"San Luis Potosi, Mexico",2866142 +48425,MX,MEX,484,Sinaloa,Mexico,25.1721,-107.4795,"Sinaloa, Mexico",3156674 +48426,MX,MEX,484,Sonora,Mexico,29.2972,-110.3309,"Sonora, Mexico",3074745 +48427,MX,MEX,484,Tabasco,Mexico,17.8409,-92.6189,"Tabasco, Mexico",2572287 +48428,MX,MEX,484,Tamaulipas,Mexico,24.2669,-98.8363,"Tamaulipas, Mexico",3650602 +48429,MX,MEX,484,Tlaxcala,Mexico,19.3139,-98.2404,"Tlaxcala, Mexico",1380011 +48430,MX,MEX,484,Veracruz,Mexico,19.1738,-96.1342,"Veracruz, Mexico",8539862 +48431,MX,MEX,484,Yucatan,Mexico,20.7099,-89.0943,"Yucatan, Mexico",2259098 +48432,MX,MEX,484,Zacatecas,Mexico,22.7709,-102.5832,"Zacatecas, Mexico",1666426 +49801,MD,MDA,498,Anenii Noi,Moldova,46.8833,29.2167,"Anenii Noi, Moldova",81710 +49802,MD,MDA,498,Balti,Moldova,47.754,27.9184,"Balti, Moldova",127561 +49803,MD,MDA,498,Basarabeasca,Moldova,46.3333,28.9667,"Basarabeasca, Moldova",28978 +49804,MD,MDA,498,Bender,Moldova,46.8228,29.462,"Bender, Moldova",91197 +49805,MD,MDA,498,Briceni,Moldova,48.36,27.0858,"Briceni, Moldova",78027 +49806,MD,MDA,498,Cahul,Moldova,45.9167,28.1833,"Cahul, Moldova",119231 +49807,MD,MDA,498,Calarasi,Moldova,47.25,28.3,"Calarasi, Moldova",75075 +49808,MD,MDA,498,Camenca,Moldova,48.0319,28.6978,"Camenca, Moldova",8871 +49809,MD,MDA,498,Cantemir,Moldova,46.2854,28.1979,"Cantemir, Moldova",60001 +49810,MD,MDA,498,Causeni,Moldova,46.6333,29.4,"Causeni, Moldova",90612 +49811,MD,MDA,498,Ceadir-Lunga,Moldova,46.057,28.826,"Ceadir-Lunga, Moldova",16605 
+49812,MD,MDA,498,Chisinau,Moldova,47.0105,28.8638,"Chisinau, Moldova",712218 +49813,MD,MDA,498,Cimislia,Moldova,46.5289,28.7838,"Cimislia, Moldova",60925 +49814,MD,MDA,498,Comrat,Moldova,46.2956,28.6549,"Comrat, Moldova",72254 +49815,MD,MDA,498,Criuleni,Moldova,47.212,29.1617,"Criuleni, Moldova",46442 +49816,MD,MDA,498,Donduseni,Moldova,48.2372,27.6104,"Donduseni, Moldova",87092 +49817,MD,MDA,498,Drochia,Moldova,48.0333,27.75,"Drochia, Moldova",43015 +49818,MD,MDA,498,Dubasari,Moldova,47.267,29.167,"Dubasari, Moldova",28500 +49819,MD,MDA,498,Edinet,Moldova,48.1667,27.3167,"Edinet, Moldova",90320 +49820,MD,MDA,498,Falesti,Moldova,47.5,27.72,"Falesti, Moldova",89389 +49821,MD,MDA,498,Floresti,Moldova,47.8933,28.3014,"Floresti, Moldova",155646 +49822,MD,MDA,498,Glodeni,Moldova,47.7667,27.5167,"Glodeni, Moldova",119762 +49823,MD,MDA,498,Grigoriopol,Moldova,47.1536,29.2964,"Grigoriopol, Moldova",9381 +49824,MD,MDA,498,Hincesti,Moldova,46.8167,28.5833,"Hincesti, Moldova",97704 +49825,MD,MDA,498,Ialoveni,Moldova,46.9439,28.7772,"Ialoveni, Moldova",51056 +49826,MD,MDA,498,Leova,Moldova,46.4806,28.2644,"Leova, Moldova",64924 +49827,MD,MDA,498,Nisporeni,Moldova,47.0833,28.1833,"Nisporeni, Moldova",56510 +49828,MD,MDA,498,Ocnita,Moldova,48.4061,27.4859,"Ocnita, Moldova",116271 +49829,MD,MDA,498,Orhei,Moldova,47.3735,28.822,"Orhei, Moldova",48105 +49830,MD,MDA,498,Rezina,Moldova,47.7333,28.95,"Rezina, Moldova",69454 +49831,MD,MDA,498,Ribnita,Moldova,47.7667,29,"Ribnita, Moldova",47949 +49832,MD,MDA,498,Riscani,Moldova,47.9679,27.5565,"Riscani, Moldova",87153 +49833,MD,MDA,498,Singerei,Moldova,47.6333,28.15,"Singerei, Moldova",42227 +49834,MD,MDA,498,Slobozia,Moldova,46.7333,29.7,"Slobozia, Moldova",14618 +49835,MD,MDA,498,Soldanesti,Moldova,47.8167,28.8,"Soldanesti, Moldova",94986 +49836,MD,MDA,498,Soroca,Moldova,48.1618,28.3011,"Soroca, Moldova",70594 +49837,MD,MDA,498,Stefan Voda,Moldova,46.5153,29.5297,"Stefan Voda, Moldova",88900 
+49838,MD,MDA,498,Straseni,Moldova,47.1333,28.6167,"Straseni, Moldova",43154 +49839,MD,MDA,498,Taraclia,Moldova,45.9,28.6667,"Taraclia, Moldova",70126 +49840,MD,MDA,498,Telenesti,Moldova,47.5032,28.3535,"Telenesti, Moldova",383806 +49841,MD,MDA,498,Tiraspol,Moldova,46.85,29.6333,"Tiraspol, Moldova",133807 +49842,MD,MDA,498,Transnistria,Moldova,47.2153,29.463,"Transnistria, Moldova",110545 +49843,MD,MDA,498,Ungheni,Moldova,47.2077,27.8073,"Ungheni, Moldova",30804 +49844,MD,MDA,498,Vulcanesti,Moldova,45.6833,28.4042,"Vulcanesti, Moldova",12185 +52801,NL,NLD,528,Drenthe,Netherlands,52.862485,6.618435,"Drenthe, Netherlands",493682 +52802,NL,NLD,528,Flevoland,Netherlands,52.550383,5.515162,"Flevoland, Netherlands",423021 +52803,NL,NLD,528,Friesland,Netherlands,53.087337,5.7925,"Friesland, Netherlands",649957 +52804,NL,NLD,528,Gelderland,Netherlands,52.061738,5.939114,"Gelderland, Netherlands",2085952 +52805,NL,NLD,528,Groningen,Netherlands,53.217922,6.741514,"Groningen, Netherlands",585866 +52806,NL,NLD,528,Limburg,Netherlands,51.209227,5.93387,"Limburg, Netherlands",1117201 +52807,NL,NLD,528,Noord-Brabant,Netherlands,51.561174,5.184942,"Noord-Brabant, Netherlands",2562955 +52808,NL,NLD,528,Noord-Holland,Netherlands,52.600906,4.918688,"Noord-Holland, Netherlands",2879527 +52809,NL,NLD,528,Overijssel,Netherlands,52.444558,6.441722,"Overijssel, Netherlands",1162406 +52810,NL,NLD,528,Utrecht,Netherlands,52.084251,5.163824,"Utrecht, Netherlands",1354834 +52811,NL,NLD,528,Zeeland,Netherlands,51.47936,3.861559,"Zeeland, Netherlands",383488 +52812,NL,NLD,528,Zuid-Holland,Netherlands,51.937835,4.462114,"Zuid-Holland, Netherlands",3708696 +533,AW,ABW,533,Aruba,Netherlands,12.5211,-69.9683,"Aruba, Netherlands",106766 +531,CW,CUW,531,Curacao,Netherlands,12.1696,-68.99,"Curacao, Netherlands",164100 +534,SX,SXM,534,Sint Maarten,Netherlands,18.0425,-63.0548,"Sint Maarten, Netherlands",42882 +535,BQ,BES,535,"Bonaire, Sint Eustatius and Saba",Netherlands,12.1784,-68.2385,"Bonaire, Sint 
Eustatius and Saba, Netherlands",26221 +184,CK,COK,184,Cook Islands,New Zealand,-21.2367,-159.7777,"Cook Islands, New Zealand",17459 +570,NU,NIU,570,Niue,New Zealand,-19.0544,-169.8672,"Niue, New Zealand",1650 +56601,NG,NGA,566,Abia,Nigeria,5.4527,7.5248,"Abia, Nigeria",3727347 +56602,NG,NGA,566,Adamawa,Nigeria,9.3265,12.3984,"Adamawa, Nigeria",4248436 +56603,NG,NGA,566,Akwa Ibom,Nigeria,4.9057,7.8537,"Akwa Ibom, Nigeria",5482177 +56604,NG,NGA,566,Anambra,Nigeria,6.2209,6.937,"Anambra, Nigeria",5527809 +56605,NG,NGA,566,Bauchi,Nigeria,10.7761,9.9992,"Bauchi, Nigeria",6537314 +56606,NG,NGA,566,Bayelsa,Nigeria,4.7719,6.0699,"Bayelsa, Nigeria",2277961 +56607,NG,NGA,566,Benue,Nigeria,7.3369,8.7404,"Benue, Nigeria",5741815 +56608,NG,NGA,566,Borno,Nigeria,11.8846,13.152,"Borno, Nigeria",5860183 +56609,NG,NGA,566,Cross River,Nigeria,5.8702,8.5988,"Cross River, Nigeria",3866269 +56610,NG,NGA,566,Delta,Nigeria,5.704,5.9339,"Delta, Nigeria",5663362 +56611,NG,NGA,566,Ebonyi,Nigeria,6.2649,8.0137,"Ebonyi, Nigeria",2880383 +56612,NG,NGA,566,Edo,Nigeria,6.6342,5.9304,"Edo, Nigeria",4235595 +56613,NG,NGA,566,Ekiti,Nigeria,7.719,5.311,"Ekiti, Nigeria",3270798 +56614,NG,NGA,566,Enugu,Nigeria,6.5364,7.4356,"Enugu, Nigeria",4411119 +56615,NG,NGA,566,Federal Capital Territory,Nigeria,8.8941,7.186,"Federal Capital Territory, Nigeria",3564126 +56616,NG,NGA,566,Gombe,Nigeria,10.3638,11.1928,"Gombe, Nigeria",3256962 +56617,NG,NGA,566,Imo,Nigeria,5.572,7.0588,"Imo, Nigeria",5408756 +56618,NG,NGA,566,Jigawa,Nigeria,12.228,9.5616,"Jigawa, Nigeria",5828163 +56619,NG,NGA,566,Kaduna,Nigeria,10.3764,7.7095,"Kaduna, Nigeria",8252366 +56620,NG,NGA,566,Kano,Nigeria,11.7471,8.5247,"Kano, Nigeria",13076892 +56621,NG,NGA,566,Katsina,Nigeria,12.3797,7.6306,"Katsina, Nigeria",7831319 +56622,NG,NGA,566,Kebbi,Nigeria,11.4942,4.2333,"Kebbi, Nigeria",4440050 +56623,NG,NGA,566,Kogi,Nigeria,7.7337,6.6906,"Kogi, Nigeria",4473490 +56624,NG,NGA,566,Kwara,Nigeria,8.9669,4.3874,"Kwara, Nigeria",3192893 
+56625,NG,NGA,566,Lagos,Nigeria,6.5236,3.6006,"Lagos, Nigeria",12550598 +56626,NG,NGA,566,Nasarawa,Nigeria,8.4998,8.1997,"Nasarawa, Nigeria",2523395 +56627,NG,NGA,566,Niger,Nigeria,9.9309,5.5983,"Niger, Nigeria",5556247 +56628,NG,NGA,566,Ogun,Nigeria,6.998,3.4737,"Ogun, Nigeria",5217716 +56629,NG,NGA,566,Ondo,Nigeria,6.9149,5.1478,"Ondo, Nigeria",4671695 +56630,NG,NGA,566,Osun,Nigeria,7.5629,4.52,"Osun, Nigeria",4705589 +56631,NG,NGA,566,Oyo,Nigeria,8.1574,3.6147,"Oyo, Nigeria",7840864 +56632,NG,NGA,566,Plateau,Nigeria,9.2182,9.5179,"Plateau, Nigeria",4200442 +56633,NG,NGA,566,Rivers,Nigeria,4.8396,6.9112,"Rivers, Nigeria",7303924 +56634,NG,NGA,566,Sokoto,Nigeria,13.0533,5.3223,"Sokoto, Nigeria",4998090 +56635,NG,NGA,566,Taraba,Nigeria,7.9994,10.774,"Taraba, Nigeria",3066834 +56636,NG,NGA,566,Yobe,Nigeria,12.2939,11.439,"Yobe, Nigeria",3294137 +56637,NG,NGA,566,Zamfara,Nigeria,12.1222,6.2236,"Zamfara, Nigeria",4515427 +58601,PK,PAK,586,Azad Jammu and Kashmir,Pakistan,34.027401,73.947253,"Azad Jammu and Kashmir, Pakistan",4045366 +58602,PK,PAK,586,Balochistan,Pakistan,28.328492,65.898403,"Balochistan, Pakistan",12344408 +58603,PK,PAK,586,Gilgit-Baltistan,Pakistan,35.792146,74.982138,"Gilgit-Baltistan, Pakistan",1013584 +58604,PK,PAK,586,Islamabad,Pakistan,33.665087,73.121219,"Islamabad, Pakistan",2006572 +58605,PK,PAK,586,Khyber Pakhtunkhwa,Pakistan,34.485332,72.09169,"Khyber Pakhtunkhwa, Pakistan",30523371 +58606,PK,PAK,586,Punjab,Pakistan,30.811346,72.139132,"Punjab, Pakistan",110012442 +58607,PK,PAK,586,Sindh,Pakistan,26.009446,68.776807,"Sindh, Pakistan",47886051 +60401,PE,PER,604,Amazonas,Peru,-5.077253,-78.050172,"Amazonas, Peru",426800 +60402,PE,PER,604,Ancash,Peru,-9.407125,-77.671795,"Ancash, Peru",1180600 +60403,PE,PER,604,Apurimac,Peru,-14.027713,-72.975378,"Apurimac, Peru",430700 +60404,PE,PER,604,Arequipa,Peru,-15.843524,-72.475539,"Arequipa, Peru",1497400 +60405,PE,PER,604,Ayacucho,Peru,-14.091648,-74.08344,"Ayacucho, Peru",668200 
+60406,PE,PER,604,Cajamarca,Peru,-6.430284,-78.745596,"Cajamarca, Peru",1453700 +60407,PE,PER,604,Callao,Peru,-11.954609,-77.136042,"Callao, Peru",1129900 +60408,PE,PER,604,Cusco,Peru,-13.191068,-72.153609,"Cusco, Peru",1357100 +60409,PE,PER,604,Huancavelica,Peru,-13.023888,-75.00277,"Huancavelica, Peru",365300 +60410,PE,PER,604,Huanuco,Peru,-9.421676,-76.040642,"Huanuco, Peru",760300 +60411,PE,PER,604,Ica,Peru,-14.235097,-75.574821,"Ica, Peru",975200 +60412,PE,PER,604,Junin,Peru,-11.541783,-74.876968,"Junin, Peru",1361500 +60413,PE,PER,604,La Libertad,Peru,-7.92139,-78.370238,"La Libertad, Peru",2016800 +60414,PE,PER,604,Lambayeque,Peru,-6.353049,-79.824113,"Lambayeque, Peru",1310800 +60415,PE,PER,604,Lima,Peru,-11.766533,-76.604498,"Lima, Peru",10628500 +60416,PE,PER,604,Loreto,Peru,-4.124847,-74.424115,"Loreto, Peru",1027600 +60417,PE,PER,604,Madre de Dios,Peru,-11.972699,-70.53172,"Madre de Dios, Peru",173800 +60418,PE,PER,604,Moquegua,Peru,-16.860271,-70.839046,"Moquegua, Peru",192700 +60419,PE,PER,604,Pasco,Peru,-10.39655,-75.307635,"Pasco, Peru",271900 +60420,PE,PER,604,Piura,Peru,-5.133361,-80.335861,"Piura, Peru",2048000 +60421,PE,PER,604,Puno,Peru,-14.995827,-69.922726,"Puno, Peru",1238000 +60422,PE,PER,604,San Martin,Peru,-7.039531,-76.729127,"San Martin, Peru",899600 +60423,PE,PER,604,Tacna,Peru,-17.644161,-70.27756,"Tacna, Peru",371000 +60424,PE,PER,604,Tumbes,Peru,-3.857496,-80.545255,"Tumbes, Peru",251500 +60425,PE,PER,604,Ucayali,Peru,-9.621718,-73.444929,"Ucayali, Peru",589100 +61601,PL,POL,616,Dolnoslaskie,Poland,51.134,16.8842,"Dolnoslaskie, Poland",2901225 +61602,PL,POL,616,Kujawsko-pomorskie,Poland,53.1648,18.4834,"Kujawsko-pomorskie, Poland",2077775 +61603,PL,POL,616,Lubelskie,Poland,51.2494,23.1011,"Lubelskie, Poland",2117619 +61604,PL,POL,616,Lubuskie,Poland,52.2275,15.2559,"Lubuskie, Poland",1014548 +61605,PL,POL,616,Lodzkie,Poland,51.4635,19.1727,"Lodzkie, Poland",2466322 +61606,PL,POL,616,Malopolskie,Poland,49.7225,20.2503,"Malopolskie, 
Poland",3400577 +61607,PL,POL,616,Mazowieckie,Poland,51.8927,21.0022,"Mazowieckie, Poland",5403412 +61608,PL,POL,616,Opolskie,Poland,50.8004,17.938,"Opolskie, Poland",986506 +61609,PL,POL,616,Podkarpackie,Poland,50.0575,22.0896,"Podkarpackie, Poland",2129015 +61610,PL,POL,616,Podlaskie,Poland,53.0697,22.9675,"Podlaskie, Poland",1181533 +61611,PL,POL,616,Pomorskie,Poland,54.2944,18.1531,"Pomorskie, Poland",2333523 +61612,PL,POL,616,Slaskie,Poland,50.5717,19.322,"Slaskie, Poland",4533565 +61613,PL,POL,616,Swietokrzyskie,Poland,50.6261,20.9406,"Swietokrzyskie, Poland",1241546 +61614,PL,POL,616,Warminsko-mazurskie,Poland,53.8671,20.7028,"Warminsko-mazurskie, Poland",1428983 +61615,PL,POL,616,Wielkopolskie,Poland,52.28,17.3523,"Wielkopolskie, Poland",3493969 +61616,PL,POL,616,Zachodniopomorskie,Poland,53.4658,15.1823,"Zachodniopomorskie, Poland",1701030 +64201,RO,ROU,642,Alba,Romania,46.1559,23.5556,"Alba, Romania",74000 +64202,RO,ROU,642,Arad,Romania,46.176,21.319,"Arad, Romania",409072 +64203,RO,ROU,642,Arges,Romania,45.0723,24.8143,"Arges, Romania",612431 +64204,RO,ROU,642,Bacau,Romania,46.5833,26.9167,"Bacau, Romania",616168 +64205,RO,ROU,642,Bihor,Romania,47.0158,22.1723,"Bihor, Romania",575398 +64206,RO,ROU,642,Bistrita-Nasaud,Romania,47.2486,24.5323,"Bistrita-Nasaud, Romania",277861 +64207,RO,ROU,642,Botosani,Romania,47.745,26.6621,"Botosani, Romania",412626 +64208,RO,ROU,642,Braila,Romania,45.271,27.9743,"Braila, Romania",304925 +64209,RO,ROU,642,Brasov,Romania,45.6667,25.6167,"Brasov, Romania",549217 +64210,RO,ROU,642,Bucuresti,Romania,44.4268,26.1025,"Bucuresti, Romania",1883425 +64211,RO,ROU,642,Buzau,Romania,45.1667,26.8167,"Buzau, Romania",432054 +64212,RO,ROU,642,Calarasi,Romania,44.2085,27.3137,"Calarasi, Romania",285050 +64213,RO,ROU,642,Caras-Severin,Romania,45.114,22.0741,"Caras-Severin, Romania",274277 +64214,RO,ROU,642,Cluj,Romania,46.7667,23.5833,"Cluj, Romania",691106 +64215,RO,ROU,642,Constanta,Romania,44.1773,28.6529,"Constanta, Romania",684082 
+64216,RO,ROU,642,Covasna,Romania,45.8446,26.1687,"Covasna, Romania",210177 +64217,RO,ROU,642,Dambovita,Romania,44.929,25.4254,"Dambovita, Romania",518745 +64218,RO,ROU,642,Dolj,Romania,44.1623,23.6325,"Dolj, Romania",660544 +64219,RO,ROU,642,Galati,Romania,45.4382,28.0563,"Galati, Romania",536167 +64220,RO,ROU,642,Giurgiu,Romania,43.9008,25.9739,"Giurgiu, Romania",265494 +64221,RO,ROU,642,Gorj,Romania,44.9486,23.2427,"Gorj, Romania",334238 +64222,RO,ROU,642,Harghita,Romania,46.4929,25.6457,"Harghita, Romania",304969 +64223,RO,ROU,642,Hunedoara,Romania,45.7697,22.9203,"Hunedoara, Romania",396253 +64224,RO,ROU,642,Ialomita,Romania,44.6031,27.379,"Ialomita, Romania",258669 +64225,RO,ROU,642,Iasi,Romania,47.1598,27.5872,"Iasi, Romania",772348 +64226,RO,ROU,642,Ilfov,Romania,44.5355,26.2325,"Ilfov, Romania",388738 +64227,RO,ROU,642,Maramures,Romania,47.6738,23.7456,"Maramures, Romania",516562 +64228,RO,ROU,642,Mehedinti,Romania,44.5515,22.9044,"Mehedinti, Romania",254570 +64229,RO,ROU,642,Mures,Romania,46.557,24.6723,"Mures, Romania",550846 +64230,RO,ROU,642,Neamt,Romania,46.9759,26.3819,"Neamt, Romania",470766 +64231,RO,ROU,642,Olt,Romania,44.2008,24.5023,"Olt, Romania",415530 +64232,RO,ROU,642,Prahova,Romania,45.0892,26.0829,"Prahova, Romania",762886 +64233,RO,ROU,642,Salaj,Romania,47.2091,23.2122,"Salaj, Romania",224384 +64234,RO,ROU,642,Satu Mare,Romania,47.79,22.89,"Satu Mare, Romania",329079 +64235,RO,ROU,642,Sibiu,Romania,45.7969,24.15,"Sibiu, Romania",375992 +64236,RO,ROU,642,Suceava,Romania,47.6514,26.2556,"Suceava, Romania",634810 +64237,RO,ROU,642,Teleorman,Romania,44.016,25.2987,"Teleorman, Romania",360178 +64238,RO,ROU,642,Timis,Romania,45.8139,21.3331,"Timis, Romania",683540 +64239,RO,ROU,642,Tulcea,Romania,45.1767,28.8052,"Tulcea, Romania",201462 +64240,RO,ROU,642,Valcea,Romania,45.0798,24.0835,"Valcea, Romania",355320 +64241,RO,ROU,642,Vaslui,Romania,46.6381,27.7288,"Vaslui, Romania",395500 +64242,RO,ROU,642,Vrancea,Romania,45.8135,27.0658,"Vrancea, 
Romania",340310 +64301,RU,RUS,643,Adygea Republic,Russia,44.6939006,40.1520421,"Adygea Republic, Russia",453376 +64302,RU,RUS,643,Altai Krai,Russia,52.6932243,82.6931424,"Altai Krai, Russia",2350080 +64303,RU,RUS,643,Altai Republic,Russia,50.7114101,86.8572186,"Altai Republic, Russia",218063 +64304,RU,RUS,643,Amur Oblast,Russia,52.8032368,128.437295,"Amur Oblast, Russia",798424 +64305,RU,RUS,643,Arkhangelsk Oblast,Russia,63.5589686,43.1221646,"Arkhangelsk Oblast, Russia",1155028 +64306,RU,RUS,643,Astrakhan Oblast,Russia,47.1878186,47.608851,"Astrakhan Oblast, Russia",1017514 +64307,RU,RUS,643,Bashkortostan Republic,Russia,54.8573563,57.1439682,"Bashkortostan Republic, Russia",4063293 +64308,RU,RUS,643,Belgorod Oblast,Russia,50.7080119,37.5837615,"Belgorod Oblast, Russia",1549876 +64309,RU,RUS,643,Bryansk Oblast,Russia,52.8873315,33.415853,"Bryansk Oblast, Russia",1210982 +64310,RU,RUS,643,Buryatia Republic,Russia,52.7182426,109.492143,"Buryatia Republic, Russia",984511 +64311,RU,RUS,643,Chechen Republic,Russia,43.3976147,45.6985005,"Chechen Republic, Russia",1436981 +64312,RU,RUS,643,Chelyabinsk Oblast,Russia,54.4223954,61.1865846,"Chelyabinsk Oblast, Russia",3493036 +64313,RU,RUS,643,Chukotka Autonomous Okrug,Russia,66.0006475,169.4900869,"Chukotka Autonomous Okrug, Russia",49348 +64314,RU,RUS,643,Chuvashia Republic,Russia,55.4259922,47.0849429,"Chuvashia Republic, Russia",1231117 +64315,RU,RUS,643,Dagestan Republic,Russia,43.0574916,47.1332224,"Dagestan Republic, Russia",3063885 +64316,RU,RUS,643,Ingushetia Republic,Russia,43.11542075,45.01713552,"Ingushetia Republic, Russia",488043 +64317,RU,RUS,643,Irkutsk Oblast,Russia,56.6370122,104.719221,"Irkutsk Oblast, Russia",2404195 +64318,RU,RUS,643,Ivanovo Oblast,Russia,56.9167446,41.4352137,"Ivanovo Oblast, Russia",1014646 +64319,RU,RUS,643,Jewish Autonomous Okrug,Russia,48.57527615,132.6630746,"Jewish Autonomous Okrug, Russia",162014 +64320,RU,RUS,643,Kabardino-Balkarian 
Republic,Russia,43.4806048,43.5978976,"Kabardino-Balkarian Republic, Russia",865828 +64321,RU,RUS,643,Kaliningrad Oblast,Russia,54.7293041,21.1489473,"Kaliningrad Oblast, Russia",994599 +64322,RU,RUS,643,Kalmykia Republic,Russia,46.2313018,45.3275745,"Kalmykia Republic, Russia",275413 +64323,RU,RUS,643,Kaluga Oblast,Russia,54.4382773,35.5272854,"Kaluga Oblast, Russia",1012056 +64324,RU,RUS,643,Kamchatka Krai,Russia,57.1914882,160.0383819,"Kamchatka Krai, Russia",315557 +64325,RU,RUS,643,Karachay-Cherkess Republic,Russia,43.7368326,41.7267991,"Karachay-Cherkess Republic, Russia",466305 +64326,RU,RUS,643,Karelia Republic,Russia,62.6194031,33.4920267,"Karelia Republic, Russia",622484 +64327,RU,RUS,643,Kemerovo Oblast,Russia,54.5335781,87.342861,"Kemerovo Oblast, Russia",2694877 +64328,RU,RUS,643,Khabarovsk Krai,Russia,51.6312684,136.121524,"Khabarovsk Krai, Russia",1328302 +64329,RU,RUS,643,Khakassia Republic,Russia,53.72258845,91.44293627,"Khakassia Republic, Russia",537513 +64330,RU,RUS,643,Khanty-Mansi Autonomous Okrug,Russia,61.0259025,69.0982628,"Khanty-Mansi Autonomous Okrug, Russia",1532243 +64331,RU,RUS,643,Kirov Oblast,Russia,57.9665589,49.4074599,"Kirov Oblast, Russia",1283238 +64332,RU,RUS,643,Komi Republic,Russia,63.9881421,54.3326073,"Komi Republic, Russia",840873 +64333,RU,RUS,643,Kostroma Oblast,Russia,58.424756,44.2533273,"Kostroma Oblast, Russia",643324 +64334,RU,RUS,643,Krasnodar Krai,Russia,45.7684014,39.0261044,"Krasnodar Krai, Russia",5603420 +64335,RU,RUS,643,Krasnoyarsk Krai,Russia,63.3233807,97.0979974,"Krasnoyarsk Krai, Russia",2876497 +64336,RU,RUS,643,Kurgan Oblast,Russia,55.7655302,64.5632681,"Kurgan Oblast, Russia",845537 +64337,RU,RUS,643,Kursk Oblast,Russia,51.6568453,36.4852695,"Kursk Oblast, Russia",1115237 +64338,RU,RUS,643,Leningrad Oblast,Russia,60.1853296,32.3925325,"Leningrad Oblast, Russia",1813816 +64339,RU,RUS,643,Lipetsk Oblast,Russia,52.6935178,39.1122664,"Lipetsk Oblast, Russia",1150201 +64340,RU,RUS,643,Magadan 
Oblast,Russia,62.48858785,153.9903764,"Magadan Oblast, Russia",144091 +64341,RU,RUS,643,Mari El Republic,Russia,56.5767504,47.8817512,"Mari El Republic, Russia",682333 +64342,RU,RUS,643,Mordovia Republic,Russia,54.4419829,44.4661144,"Mordovia Republic, Russia",805056 +64343,RU,RUS,643,Moscow,Russia,55.7504461,37.6174943,"Moscow, Russia",12506468 +64344,RU,RUS,643,Moscow Oblast,Russia,55.5043158,38.0353929,"Moscow Oblast, Russia",7503385 +64345,RU,RUS,643,Murmansk Oblast,Russia,68.0000418,33.9999151,"Murmansk Oblast, Russia",753557 +64346,RU,RUS,643,Nenets Autonomous Okrug,Russia,68.27557185,57.1686375,"Nenets Autonomous Okrug, Russia",43997 +64347,RU,RUS,643,Nizhny Novgorod Oblast,Russia,55.4718033,44.0911594,"Nizhny Novgorod Oblast, Russia",3234752 +64348,RU,RUS,643,North Ossetia - Alania Republic,Russia,42.7933611,44.6324493,"North Ossetia - Alania Republic, Russia",701765 +64349,RU,RUS,643,Novgorod Oblast,Russia,58.2843833,32.5169757,"Novgorod Oblast, Russia",606476 +64350,RU,RUS,643,Novosibirsk Oblast,Russia,54.9720169,79.4813924,"Novosibirsk Oblast, Russia",2788849 +64351,RU,RUS,643,Omsk Oblast,Russia,56.0935263,73.5099936,"Omsk Oblast, Russia",1960081 +64352,RU,RUS,643,Orel Oblast,Russia,52.9685433,36.0692477,"Orel Oblast, Russia",747247 +64353,RU,RUS,643,Orenburg Oblast,Russia,52.0269262,54.7276647,"Orenburg Oblast, Russia",1977720 +64354,RU,RUS,643,Penza Oblast,Russia,53.1655415,44.7879181,"Penza Oblast, Russia",1331655 +64355,RU,RUS,643,Perm Krai,Russia,58.5951603,56.3159546,"Perm Krai, Russia",2623122 +64356,RU,RUS,643,Primorsky Krai,Russia,45.0819456,134.726645,"Primorsky Krai, Russia",1913037 +64357,RU,RUS,643,Pskov Oblast,Russia,57.5358729,28.8586826,"Pskov Oblast, Russia",636546 +64358,RU,RUS,643,Rostov Oblast,Russia,47.6222451,40.7957942,"Rostov Oblast, Russia",4220452 +64359,RU,RUS,643,Ryazan Oblast,Russia,54.4226732,40.5705246,"Ryazan Oblast, Russia",1121474 +64360,RU,RUS,643,Saint Petersburg,Russia,59.9606739,30.1586551,"Saint Petersburg, 
Russia",5351935 +64361,RU,RUS,643,Sakha (Yakutiya) Republic,Russia,66.941626,129.642371,"Sakha (Yakutiya) Republic, Russia",964330 +64362,RU,RUS,643,Sakhalin Oblast,Russia,49.7219665,143.448533,"Sakhalin Oblast, Russia",490181 +64363,RU,RUS,643,Samara Oblast,Russia,53.2128813,50.8914633,"Samara Oblast, Russia",3193514 +64364,RU,RUS,643,Saratov Oblast,Russia,51.6520555,46.8631952,"Saratov Oblast, Russia",2462950 +64365,RU,RUS,643,Smolensk Oblast,Russia,55.0343496,33.0192065,"Smolensk Oblast, Russia",949348 +64366,RU,RUS,643,Stavropol Krai,Russia,44.8632577,43.4406913,"Stavropol Krai, Russia",2800674 +64367,RU,RUS,643,Sverdlovsk Oblast,Russia,58.6414755,61.8021546,"Sverdlovsk Oblast, Russia",4325256 +64368,RU,RUS,643,Tambov Oblast,Russia,52.9019574,41.3578918,"Tambov Oblast, Russia",1033552 +64369,RU,RUS,643,Tatarstan Republic,Russia,55.7648572,52.43104273,"Tatarstan Republic, Russia",3894284 +64370,RU,RUS,643,Tomsk Oblast,Russia,58.6124279,82.0475315,"Tomsk Oblast, Russia",1078280 +64371,RU,RUS,643,Tula Oblast,Russia,53.9570701,37.3690909,"Tula Oblast, Russia",1491855 +64372,RU,RUS,643,Tver Oblast,Russia,57.1134475,35.1744428,"Tver Oblast, Russia",1283873 +64373,RU,RUS,643,Tyumen Oblast,Russia,58.8206488,70.3658837,"Tyumen Oblast, Russia",3692400 +64374,RU,RUS,643,Tyva Republic,Russia,51.4017149,93.8582593,"Tyva Republic, Russia",321722 +64375,RU,RUS,643,Udmurt Republic,Russia,57.1961165,52.6959832,"Udmurt Republic, Russia",1513044 +64376,RU,RUS,643,Ulyanovsk Oblast,Russia,54.1463177,47.2324921,"Ulyanovsk Oblast, Russia",1246618 +64377,RU,RUS,643,Vladimir Oblast,Russia,56.0503336,40.6561633,"Vladimir Oblast, Russia",1378337 +64378,RU,RUS,643,Volgograd Oblast,Russia,49.6048339,44.2903582,"Volgograd Oblast, Russia",2521276 +64379,RU,RUS,643,Vologda Oblast,Russia,60.0391461,43.1215213,"Vologda Oblast, Russia",1176689 +64380,RU,RUS,643,Voronezh Oblast,Russia,50.9800393,40.1506507,"Voronezh Oblast, Russia",2333768 +64381,RU,RUS,643,Yamalo-Nenets Autonomous 
Okrug,Russia,67.1471631,74.3415488,"Yamalo-Nenets Autonomous Okrug, Russia",538547 +64382,RU,RUS,643,Yaroslavl Oblast,Russia,57.7781976,39.0021095,"Yaroslavl Oblast, Russia",1265684 +64383,RU,RUS,643,Zabaykalsky Krai,Russia,52.248521,115.956325,"Zabaykalsky Krai, Russia",1072806 +70301,SK,SVK,703,Banska Bystrica,Slovakia,48.7363,19.1462,"Banska Bystrica, Slovakia",657119 +70302,SK,SVK,703,Bratislava,Slovakia,48.1486,17.107,"Bratislava, Slovakia",603699 +70303,SK,SVK,703,Kosice,Slovakia,48.7164,21.2611,"Kosice, Slovakia",771947 +70304,SK,SVK,703,Nitra,Slovakia,48.3061,18.0764,"Nitra, Slovakia",708498 +70305,SK,SVK,703,Presov,Slovakia,49.0018,21.2393,"Presov, Slovakia",798596 +70306,SK,SVK,703,Trencin,Slovakia,48.8849,18.0335,"Trencin, Slovakia",600386 +70307,SK,SVK,703,Trnava,Slovakia,48.3709,17.5833,"Trnava, Slovakia",554172 +70308,SK,SVK,703,Zilina,Slovakia,49.2194,18.7408,"Zilina, Slovakia",694763 +72401,ES,ESP,724,Andalusia,Spain,37.5443,-4.7278,"Andalusia, Spain",8427405 +72402,ES,ESP,724,Aragon,Spain,41.5976,-0.9057,"Aragon, Spain",1320586 +72403,ES,ESP,724,Asturias,Spain,43.3614,-5.8593,"Asturias, Spain",1022205 +72404,ES,ESP,724,Baleares,Spain,39.710358,2.995148,"Baleares, Spain",1188220 +72405,ES,ESP,724,Canarias,Spain,28.2916,-16.6291,"Canarias, Spain",2206901 +72406,ES,ESP,724,Cantabria,Spain,43.1828,-3.9878,"Cantabria, Spain",581641 +72407,ES,ESP,724,Castilla - La Mancha,Spain,39.2796,-3.0977,"Castilla - La Mancha, Spain",2034877 +72408,ES,ESP,724,Castilla y Leon,Spain,41.8357,-4.3976,"Castilla y Leon, Spain",2407733 +72409,ES,ESP,724,Catalonia,Spain,41.5912,1.5209,"Catalonia, Spain",7566431 +72410,ES,ESP,724,Ceuta,Spain,35.8894,-5.3213,"Ceuta, Spain",84829 +72411,ES,ESP,724,C. Valenciana,Spain,39.484,-0.7533,"C. 
Valenciana, Spain",4974969 +72412,ES,ESP,724,Extremadura,Spain,39.4937,-6.0679,"Extremadura, Spain",1065424 +72413,ES,ESP,724,Galicia,Spain,42.5751,-8.1339,"Galicia, Spain",2700441 +72414,ES,ESP,724,Madrid,Spain,40.4168,-3.7038,"Madrid, Spain",6641649 +72415,ES,ESP,724,Melilla,Spain,35.2923,-2.9381,"Melilla, Spain",84689 +72416,ES,ESP,724,Murcia,Spain,37.9922,-1.1307,"Murcia, Spain",1487663 +72417,ES,ESP,724,Navarra,Spain,42.6954,-1.6761,"Navarra, Spain",649946 +72418,ES,ESP,724,Pais Vasco,Spain,42.9896,-2.6189,"Pais Vasco, Spain",2177880 +72419,ES,ESP,724,La Rioja,Spain,42.2871,-2.5396,"La Rioja, Spain",313571 +75201,SE,SWE,752,Blekinge,Sweden,56.2784,15.018,"Blekinge, Sweden",159606 +75202,SE,SWE,752,Dalarna,Sweden,61.0917,14.6664,"Dalarna, Sweden",287966 +75203,SE,SWE,752,Gavleborg,Sweden,61.3012,16.1534,"Gavleborg, Sweden",287382 +75204,SE,SWE,752,Gotland,Sweden,57.4684,18.4867,"Gotland, Sweden",59686 +75205,SE,SWE,752,Halland,Sweden,56.8967,12.8034,"Halland, Sweden",333848 +75206,SE,SWE,752,Jamtland Harjedalen,Sweden,63.1712,14.9592,"Jamtland Harjedalen, Sweden",130810 +75207,SE,SWE,752,Jonkoping,Sweden,57.3708,14.3439,"Jonkoping, Sweden",363599 +75208,SE,SWE,752,Kalmar,Sweden,57.235,16.1849,"Kalmar, Sweden",245446 +75209,SE,SWE,752,Kronoberg,Sweden,56.7183,14.4115,"Kronoberg, Sweden",201469 +75210,SE,SWE,752,Norrbotten,Sweden,66.8309,20.3992,"Norrbotten, Sweden",250093 +75211,SE,SWE,752,Orebro,Sweden,59.535,15.0066,"Orebro, Sweden",304805 +75212,SE,SWE,752,Ostergotland,Sweden,58.3454,15.5198,"Ostergotland, Sweden",465495 +75213,SE,SWE,752,Skane,Sweden,55.9903,13.5958,"Skane, Sweden",1377827 +75214,SE,SWE,752,Sormland,Sweden,59.0336,16.7519,"Sormland, Sweden",297540 +75215,SE,SWE,752,Stockholm,Sweden,59.6025,18.1384,"Stockholm, Sweden",2377081 +75216,SE,SWE,752,Uppsala,Sweden,60.0092,17.2715,"Uppsala, Sweden",383713 +75217,SE,SWE,752,Varmland,Sweden,59.7294,13.2354,"Varmland, Sweden",282414 +75218,SE,SWE,752,Vasterbotten,Sweden,65.3337,16.5162,"Vasterbotten, 
Sweden",271736 +75219,SE,SWE,752,Vasternorrland,Sweden,63.4276,17.7292,"Vasternorrland, Sweden",245347 +75220,SE,SWE,752,Vastmanland,Sweden,59.6714,16.2159,"Vastmanland, Sweden",275845 +75221,SE,SWE,752,Vastra Gotaland,Sweden,58.2528,13.0596,"Vastra Gotaland, Sweden",1725881 +80401,UA,UKR,804,Cherkasy Oblast,Ukraine,49.4444,32.0598,"Cherkasy Oblast, Ukraine",1206351 +80402,UA,UKR,804,Chernihiv Oblast,Ukraine,51.4982,31.2893,"Chernihiv Oblast, Ukraine",1005745 +80403,UA,UKR,804,Chernivtsi Oblast,Ukraine,48.2917,25.9352,"Chernivtsi Oblast, Ukraine",904374 +80404,UA,UKR,804,Crimea Republic*,Ukraine,45.2835,34.2008,"Crimea Republic*, Ukraine",1913731 +80405,UA,UKR,804,Dnipropetrovsk Oblast,Ukraine,48.4647,35.0462,"Dnipropetrovsk Oblast, Ukraine",3206477 +80406,UA,UKR,804,Donetsk Oblast,Ukraine,48.0159,37.8028,"Donetsk Oblast, Ukraine",4165901 +80407,UA,UKR,804,Ivano-Frankivsk Oblast,Ukraine,48.9226,24.7111,"Ivano-Frankivsk Oblast, Ukraine",1373252 +80408,UA,UKR,804,Kharkiv Oblast,Ukraine,49.9935,36.2304,"Kharkiv Oblast, Ukraine",2675598 +80409,UA,UKR,804,Kherson Oblast,Ukraine,46.6354,32.6169,"Kherson Oblast, Ukraine",1037640 +80410,UA,UKR,804,Khmelnytskyi Oblast,Ukraine,49.423,26.9871,"Khmelnytskyi Oblast, Ukraine",1264705 +80411,UA,UKR,804,Kiev,Ukraine,50.4501,30.5234,"Kiev, Ukraine",2950800 +80412,UA,UKR,804,Kiev Oblast,Ukraine,50.053,30.7667,"Kiev Oblast, Ukraine",1767940 +80413,UA,UKR,804,Kirovohrad Oblast,Ukraine,48.5079,32.2623,"Kirovohrad Oblast, Ukraine",945549 +80414,UA,UKR,804,Luhansk Oblast,Ukraine,48.574,39.3078,"Luhansk Oblast, Ukraine",2151833 +80415,UA,UKR,804,Lviv Oblast,Ukraine,49.8397,24.0297,"Lviv Oblast, Ukraine",2522021 +80416,UA,UKR,804,Mykolaiv Oblast,Ukraine,46.975,31.9946,"Mykolaiv Oblast, Ukraine",2522021 +80417,UA,UKR,804,Odessa Oblast,Ukraine,46.4846,30.7326,"Odessa Oblast, Ukraine",2380308 +80418,UA,UKR,804,Poltava Oblast,Ukraine,49.5883,34.5514,"Poltava Oblast, Ukraine",1400439 +80419,UA,UKR,804,Rivne Oblast,Ukraine,50.6199,26.2516,"Rivne 
Oblast, Ukraine",1157301 +80420,UA,UKR,804,Sevastopol*,Ukraine,44.6054,33.522,"Sevastopol*, Ukraine",443211 +80421,UA,UKR,804,Sumy Oblast,Ukraine,50.9077,34.7981,"Sumy Oblast, Ukraine",1081418 +80422,UA,UKR,804,Ternopil Oblast,Ukraine,49.5535,25.5948,"Ternopil Oblast, Ukraine",1045879 +80423,UA,UKR,804,Vinnytsia Oblast,Ukraine,49.2331,28.4682,"Vinnytsia Oblast, Ukraine",1560394 +80424,UA,UKR,804,Volyn Oblast,Ukraine,50.7472,25.3254,"Volyn Oblast, Ukraine",1035330 +80425,UA,UKR,804,Zakarpattia Oblast,Ukraine,48.6208,22.2879,"Zakarpattia Oblast, Ukraine",1256802 +80426,UA,UKR,804,Zaporizhia Oblast,Ukraine,47.8388,35.1396,"Zaporizhia Oblast, Ukraine",1705836 +80427,UA,UKR,804,Zhytomyr Oblast,Ukraine,50.2547,28.6587,"Zhytomyr Oblast, Ukraine",1220193 +82601,GB,GBR,826,England,United Kingdom,52.3555,-1.1743,"England, United Kingdom",55977200 +82602,GB,GBR,826,Northern Ireland,United Kingdom,54.7877,-6.4923,"Northern Ireland, United Kingdom",1881600 +82603,GB,GBR,826,Scotland,United Kingdom,56.4907,-4.2026,"Scotland, United Kingdom",5463300 +82604,GB,GBR,826,Wales,United Kingdom,52.1307,-3.7837,"Wales, United Kingdom",3138600 +60,BM,BMU,60,Bermuda,United Kingdom,32.3078,-64.7505,"Bermuda, United Kingdom",62273 +92,VG,VGB,92,British Virgin Islands,United Kingdom,18.4207,-64.64,"British Virgin Islands, United Kingdom",30237 +136,KY,CYM,136,Cayman Islands,United Kingdom,19.3133,-81.2546,"Cayman Islands, United Kingdom",65720 +8261,GB,GBR,826,Channel Islands,United Kingdom,49.3723,-2.3644,"Channel Islands, United Kingdom",170499 +831,GG,GGY,831,Guernsey,United Kingdom,49.448196,-2.58949,"Guernsey, United Kingdom",63000 +832,JE,JEY,832,Jersey,United Kingdom,49.2138,-2.1358,"Jersey, United Kingdom",109300 +238,FK,FLK,238,Falkland Islands (Malvinas),United Kingdom,-51.7963,-59.5236,"Falkland Islands (Malvinas), United Kingdom",3483 +292,GI,GIB,292,Gibraltar,United Kingdom,36.1408,-5.3536,"Gibraltar, United Kingdom",33691 +833,IM,IMN,833,Isle of Man,United 
Kingdom,54.2361,-4.5481,"Isle of Man, United Kingdom",85032 +500,MS,MSR,500,Montserrat,United Kingdom,16.742498,-62.187366,"Montserrat, United Kingdom",4999 +796,TC,TCA,796,Turks and Caicos Islands,United Kingdom,21.694,-71.7979,"Turks and Caicos Islands, United Kingdom",38718 +612,PN,PCN,612,Pitcairn Islands,United Kingdom,-24.3768,-128.3242,"Pitcairn Islands, United Kingdom",67 +660,AI,AIA,660,Anguilla,United Kingdom,18.2206,-63.0686,"Anguilla, United Kingdom",15002 +654,SH,SHN,654,"Saint Helena, Ascension and Tristan da Cunha",United Kingdom,-7.9467,-14.3559,"Saint Helena, Ascension and Tristan da Cunha, United Kingdom",5661 +3601,AU,AUS,36,Australian Capital Territory,Australia,-35.4735,149.0124,"Australian Capital Territory, Australia",428100 +3602,AU,AUS,36,New South Wales,Australia,-33.8688,151.2093,"New South Wales, Australia",8118000 +3603,AU,AUS,36,Northern Territory,Australia,-12.4634,130.8456,"Northern Territory, Australia",245600 +3604,AU,AUS,36,Queensland,Australia,-27.4698,153.0251,"Queensland, Australia",5115500 +3605,AU,AUS,36,South Australia,Australia,-34.9285,138.6007,"South Australia, Australia",1756500 +3606,AU,AUS,36,Tasmania,Australia,-42.8821,147.3272,"Tasmania, Australia",535500 +3607,AU,AUS,36,Victoria,Australia,-37.8136,144.9631,"Victoria, Australia",6629900 +3608,AU,AUS,36,Western Australia,Australia,-31.9505,115.8605,"Western Australia, Australia",2630600 +12401,CA,CAN,124,Alberta,Canada,53.9333,-116.5765,"Alberta, Canada",4442879 +12402,CA,CAN,124,British Columbia,Canada,53.7267,-127.6476,"British Columbia, Canada",5214805 +12403,CA,CAN,124,Manitoba,Canada,53.7609,-98.8139,"Manitoba, Canada",1383765 +12404,CA,CAN,124,New Brunswick,Canada,46.5653,-66.4619,"New Brunswick, Canada",789225 +12405,CA,CAN,124,Newfoundland and Labrador,Canada,53.1355,-57.6604,"Newfoundland and Labrador, Canada",520553 +12406,CA,CAN,124,Northwest Territories,Canada,64.8255,-124.8457,"Northwest Territories,Canada",45504 +12407,CA,CAN,124,Nova 
Scotia,Canada,44.682,-63.7443,"Nova Scotia, Canada",992055 +12408,CA,CAN,124,Ontario,Canada,51.2538,-85.3232,"Ontario, Canada",14826276 +12409,CA,CAN,124,Prince Edward Island,Canada,46.5107,-63.4168,"Prince Edward Island, Canada",164318 +12410,CA,CAN,124,Quebec,Canada,52.9399,-73.5491,"Quebec, Canada",8604495 +12411,CA,CAN,124,Saskatchewan,Canada,52.9399,-106.4509,"Saskatchewan, Canada",1179844 +12412,CA,CAN,124,Yukon,Canada,64.2823,-135,"Yukon, Canada",42986 +12416,CA,CAN,124,Nunavut,Canada,70.2998,-83.1076,"Nunavut, Canada",39403 +15601,CN,CHN,156,Anhui,China,31.8257,117.2264,"Anhui, China",61027171 +15602,CN,CHN,156,Beijing,China,40.1824,116.4142,"Beijing, China",21893095 +15603,CN,CHN,156,Chongqing,China,30.0572,107.874,"Chongqing, China",32054159 +15604,CN,CHN,156,Fujian,China,26.0789,117.9874,"Fujian, China",41540086 +15605,CN,CHN,156,Gansu,China,35.7518,104.2861,"Gansu, China",25019831 +15606,CN,CHN,156,Guangdong,China,23.3417,113.4244,"Guangdong, China",126012510 +15607,CN,CHN,156,Guangxi,China,23.8298,108.7881,"Guangxi, China",50126804 +15608,CN,CHN,156,Guizhou,China,26.8154,106.8748,"Guizhou, China",38562148 +15609,CN,CHN,156,Hainan,China,19.1959,109.7453,"Hainan, China",10081232 +15610,CN,CHN,156,Hebei,China,37.8957,114.9042,"Hebei, China",74610235 +15611,CN,CHN,156,Heilongjiang,China,47.862,127.7615,"Heilongjiang, China",31850088 +15612,CN,CHN,156,Henan,China,33.882,113.614,"Henan, China",99365519 +15613,CN,CHN,156,Hubei,China,30.9756,112.2707,"Hubei, China",57752557 +15614,CN,CHN,156,Hunan,China,27.6104,111.7088,"Hunan, China",66444864 +15615,CN,CHN,156,Inner Mongolia,China,44.0935,113.9448,"Inner Mongolia, China",24049155 +15616,CN,CHN,156,Jiangsu,China,32.9711,119.455,"Jiangsu, China",84748016 +15617,CN,CHN,156,Jiangxi,China,27.614,115.7221,"Jiangxi, China",45188635 +15618,CN,CHN,156,Jilin,China,43.6661,126.1923,"Jilin, China",24073453 +15619,CN,CHN,156,Liaoning,China,41.2956,122.6085,"Liaoning, China",42591407 
+15620,CN,CHN,156,Ningxia,China,37.2692,106.1655,"Ningxia, China",7202654 +15621,CN,CHN,156,Qinghai,China,35.7452,95.9956,"Qinghai, China",5923957 +15622,CN,CHN,156,Shaanxi,China,35.1917,108.8701,"Shaanxi, China",39528999 +15623,CN,CHN,156,Shandong,China,36.3427,118.1498,"Shandong, China",101527453 +15624,CN,CHN,156,Shanghai,China,31.202,121.4491,"Shanghai, China",24870895 +15625,CN,CHN,156,Shanxi,China,37.5777,112.2922,"Shanxi, China",34915616 +15626,CN,CHN,156,Sichuan,China,30.6171,102.7103,"Sichuan, China",83674866 +15627,CN,CHN,156,Tianjin,China,39.3054,117.323,"Tianjin, China",13866009 +15628,CN,CHN,156,Tibet,China,31.6927,88.0924,"Tibet, China",3648100 +15629,CN,CHN,156,Xinjiang,China,41.1129,85.2401,"Xinjiang, China",25852345 +15630,CN,CHN,156,Yunnan,China,24.974,101.487,"Yunnan, China",47209277 +15631,CN,CHN,156,Zhejiang,China,29.1832,120.0934,"Zhejiang, China",64567588 +344,HK,HKG,344,Hong Kong,China,22.3,114.2,"Hong Kong, China",7496988 +446,MO,MAC,446,Macau,China,22.1667,113.55,"Macau, China",649342 +16,AS,ASM,16,American Samoa,US,-14.271,-170.132,"American Samoa, US",55641 +316,GU,GUM,316,Guam,US,13.4443,144.7937,"Guam, US",164229 +580,MP,MNP,580,Northern Mariana Islands,US,15.0979,145.6739,"Northern Mariana Islands, US",55144 +850,VI,VIR,850,Virgin Islands,US,18.3358,-64.8963,"Virgin Islands, US",107268 +630,PR,PRI,630,Puerto Rico,US,18.2208,-66.5901,"Puerto Rico, US",3193694 +84000001,US,USA,840,Alabama,US,32.3182,-86.9023,"Alabama, US",4903185 +84000002,US,USA,840,Alaska,US,61.3707,-152.4044,"Alaska, US",731545 +84000004,US,USA,840,Arizona,US,33.7298,-111.4312,"Arizona, US",7278717 +84000005,US,USA,840,Arkansas,US,34.9697,-92.3731,"Arkansas, US",3017804 +84000006,US,USA,840,California,US,36.1162,-119.6816,"California, US",39512223 +84000008,US,USA,840,Colorado,US,39.0598,-105.3111,"Colorado, US",5758736 +84000009,US,USA,840,Connecticut,US,41.5978,-72.7554,"Connecticut, US",3565287 +84000010,US,USA,840,Delaware,US,39.3185,-75.5071,"Delaware, 
US",973764 +84000011,US,USA,840,District of Columbia,US,38.8974,-77.0268,"District of Columbia, US",705749 +84000012,US,USA,840,Florida,US,27.7663,-81.6868,"Florida, US",21477737 +84000013,US,USA,840,Georgia,US,33.0406,-83.6431,"Georgia, US",10617423 +84000015,US,USA,840,Hawaii,US,21.0943,-157.4983,"Hawaii, US",1415872 +84000016,US,USA,840,Idaho,US,44.2405,-114.4788,"Idaho, US",1787065 +84000017,US,USA,840,Illinois,US,40.3495,-88.9861,"Illinois, US",12671821 +84000018,US,USA,840,Indiana,US,39.8494,-86.2583,"Indiana, US",6732219 +84000019,US,USA,840,Iowa,US,42.0115,-93.2105,"Iowa, US",3155070 +84000020,US,USA,840,Kansas,US,38.5266,-96.7265,"Kansas, US",2913314 +84000021,US,USA,840,Kentucky,US,37.6681,-84.6701,"Kentucky, US",4467673 +84000022,US,USA,840,Louisiana,US,31.1695,-91.8678,"Louisiana, US",4648794 +84000023,US,USA,840,Maine,US,44.6939,-69.3819,"Maine, US",1344212 +84000024,US,USA,840,Maryland,US,39.0639,-76.8021,"Maryland, US",6045680 +84000025,US,USA,840,Massachusetts,US,42.2302,-71.5301,"Massachusetts, US",6892503 +84000026,US,USA,840,Michigan,US,43.3266,-84.5361,"Michigan, US",9986857 +84000027,US,USA,840,Minnesota,US,45.6945,-93.9002,"Minnesota, US",5639632 +84000028,US,USA,840,Mississippi,US,32.7416,-89.6787,"Mississippi, US",2976149 +84000029,US,USA,840,Missouri,US,38.4561,-92.2884,"Missouri, US",6137428 +84000030,US,USA,840,Montana,US,46.9219,-110.4544,"Montana, US",1068778 +84000031,US,USA,840,Nebraska,US,41.1254,-98.2681,"Nebraska, US",1934408 +84000032,US,USA,840,Nevada,US,38.3135,-117.0554,"Nevada, US",3080156 +84000033,US,USA,840,New Hampshire,US,43.4525,-71.5639,"New Hampshire, US",1359711 +84000034,US,USA,840,New Jersey,US,40.2989,-74.521,"New Jersey, US",8882190 +84000035,US,USA,840,New Mexico,US,34.8405,-106.2485,"New Mexico, US",2096829 +84000036,US,USA,840,New York,US,42.1657,-74.9481,"New York, US",19453561 +84000037,US,USA,840,North Carolina,US,35.6301,-79.8064,"North Carolina, US",10488084 +84000038,US,USA,840,North 
Dakota,US,47.5289,-99.784,"North Dakota, US",762062 +84000039,US,USA,840,Ohio,US,40.3888,-82.7649,"Ohio, US",11689100 +84000040,US,USA,840,Oklahoma,US,35.5653,-96.9289,"Oklahoma, US",3956971 +84000041,US,USA,840,Oregon,US,44.572,-122.0709,"Oregon, US",4217737 +84000042,US,USA,840,Pennsylvania,US,40.5908,-77.2098,"Pennsylvania, US",12801989 +84000044,US,USA,840,Rhode Island,US,41.6809,-71.5118,"Rhode Island, US",1059361 +84000045,US,USA,840,South Carolina,US,33.8569,-80.945,"South Carolina, US",5148714 +84000046,US,USA,840,South Dakota,US,44.2998,-99.4388,"South Dakota, US",884659 +84000047,US,USA,840,Tennessee,US,35.7478,-86.6923,"Tennessee, US",6829174 +84000048,US,USA,840,Texas,US,31.0545,-97.5635,"Texas, US",28995881 +84000049,US,USA,840,Utah,US,40.15,-111.8624,"Utah, US",3205958 +84000050,US,USA,840,Vermont,US,44.0459,-72.7107,"Vermont, US",623989 +84000051,US,USA,840,Virginia,US,37.7693,-78.17,"Virginia, US",8535519 +84000053,US,USA,840,Washington,US,47.4009,-121.4905,"Washington, US",7614893 +84000054,US,USA,840,West Virginia,US,38.4912,-80.9545,"West Virginia, US",1792147 +84000055,US,USA,840,Wisconsin,US,44.2685,-89.6165,"Wisconsin, US",5822434 +84000056,US,USA,840,Wyoming,US,42.756,-107.3025,"Wyoming, US",578759 diff --git a/dotnet/samples/LearnResources/Resources/PopulationByCountry.csv b/dotnet/samples/LearnResources/Resources/PopulationByCountry.csv new file mode 100644 index 000000000000..b3dcae49eb73 --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/PopulationByCountry.csv @@ -0,0 +1,199 @@ +UID,iso2,iso3,code3,Country_Region,Lat,Long,Population +4,AF,AFG,4,Afghanistan,33.93911,67.709953,38928341 +8,AL,ALB,8,Albania,41.1533,20.1683,2877800 +10,AQ,ATA,10,Antarctica,-71.9499,23.347,0 +12,DZ,DZA,12,Algeria,28.0339,1.6596,43851043 +20,AD,AND,20,Andorra,42.5063,1.5218,77265 +24,AO,AGO,24,Angola,-11.2027,17.8739,32866268 +28,AG,ATG,28,Antigua and Barbuda,17.0608,-61.7964,97928 +32,AR,ARG,32,Argentina,-38.4161,-63.6167,45195777 
+51,AM,ARM,51,Armenia,40.0691,45.0382,2963234 +40,AT,AUT,40,Austria,47.5162,14.5501,9006400 +31,AZ,AZE,31,Azerbaijan,40.1431,47.5769,10139175 +44,BS,BHS,44,Bahamas,25.025885,-78.035889,393248 +48,BH,BHR,48,Bahrain,26.0275,50.55,1701583 +50,BD,BGD,50,Bangladesh,23.685,90.3563,164689383 +52,BB,BRB,52,Barbados,13.1939,-59.5432,287371 +112,BY,BLR,112,Belarus,53.7098,27.9534,9449321 +56,BE,BEL,56,Belgium,50.8333,4.469936,11492641 +84,BZ,BLZ,84,Belize,17.1899,-88.4976,397621 +204,BJ,BEN,204,Benin,9.3077,2.3158,12123198 +64,BT,BTN,64,Bhutan,27.5142,90.4336,771612 +68,BO,BOL,68,Bolivia,-16.2902,-63.5887,11673029 +70,BA,BIH,70,Bosnia and Herzegovina,43.9159,17.6791,3280815 +72,BW,BWA,72,Botswana,-22.3285,24.6849,2351625 +76,BR,BRA,76,Brazil,-14.235,-51.9253,212559409 +96,BN,BRN,96,Brunei,4.5353,114.7277,437483 +100,BG,BGR,100,Bulgaria,42.7339,25.4858,6948445 +854,BF,BFA,854,Burkina Faso,12.2383,-1.5616,20903278 +104,MM,MMR,104,Burma,21.9162,95.956,54409794 +108,BI,BDI,108,Burundi,-3.3731,29.9189,11890781 +132,CV,CPV,132,Cabo Verde,16.5388,-23.0418,555988 +116,KH,KHM,116,Cambodia,11.55,104.9167,16718971 +120,CM,CMR,120,Cameroon,3.848,11.5021,26545864 +140,CF,CAF,140,Central African Republic,6.6111,20.9394,4829764 +148,TD,TCD,148,Chad,15.4542,18.7322,16425859 +152,CL,CHL,152,Chile,-35.6751,-71.543,19116209 +170,CO,COL,170,Colombia,4.5709,-74.2973,50882884 +178,CG,COG,178,Congo (Brazzaville),-0.228,15.8277,5518092 +180,CD,COD,180,Congo (Kinshasa),-4.0383,21.7587,89561404 +174,KM,COM,174,Comoros,-11.6455,43.3333,869595 +188,CR,CRI,188,Costa Rica,9.7489,-83.7534,5094114 +384,CI,CIV,384,Cote d'Ivoire,7.54,-5.5471,26378275 +191,HR,HRV,191,Croatia,45.1,15.2,4105268 +192,CU,CUB,192,Cuba,21.521757,-77.781167,11326616 +196,CY,CYP,196,Cyprus,35.1264,33.4299,1207361 +203,CZ,CZE,203,Czechia,49.8175,15.473,10708982 +208,DK,DNK,208,Denmark,56.2639,9.5018,5837213 +262,DJ,DJI,262,Djibouti,11.8251,42.5903,988002 +212,DM,DMA,212,Dominica,15.415,-61.371,71991 +214,DO,DOM,214,Dominican 
Republic,18.7357,-70.1627,10847904 +218,EC,ECU,218,Ecuador,-1.8312,-78.1834,17643060 +818,EG,EGY,818,Egypt,26.820553,30.802498,102334403 +222,SV,SLV,222,El Salvador,13.7942,-88.8965,6486201 +226,GQ,GNQ,226,Equatorial Guinea,1.6508,10.2679,1402985 +232,ER,ERI,232,Eritrea,15.1794,39.7823,3546427 +233,EE,EST,233,Estonia,58.5953,25.0136,1326539 +748,SZ,SWZ,748,Eswatini,-26.5225,31.4659,1160164 +231,ET,ETH,231,Ethiopia,9.145,40.4897,114963583 +242,FJ,FJI,242,Fiji,-17.7134,178.065,896444 +246,FI,FIN,246,Finland,61.92411,25.748151,5540718 +250,FR,FRA,250,France,46.2276,2.2137,65249843 +266,GA,GAB,266,Gabon,-0.8037,11.6094,2225728 +270,GM,GMB,270,Gambia,13.4432,-15.3101,2416664 +268,GE,GEO,268,Georgia,42.3154,43.3569,3989175 +276,DE,DEU,276,Germany,51.165691,10.451526,83155031 +288,GH,GHA,288,Ghana,7.9465,-1.0232,31072945 +300,GR,GRC,300,Greece,39.0742,21.8243,10423056 +308,GD,GRD,308,Grenada,12.1165,-61.679,112519 +320,GT,GTM,320,Guatemala,15.7835,-90.2308,17915567 +324,GN,GIN,324,Guinea,9.9456,-9.6966,13132792 +624,GW,GNB,624,Guinea-Bissau,11.8037,-15.1804,1967998 +328,GY,GUY,328,Guyana,4.860416,-58.93018,786559 +332,HT,HTI,332,Haiti,18.9712,-72.2852,11402533 +336,VA,VAT,336,Holy See,41.9029,12.4534,809 +340,HN,HND,340,Honduras,15.2,-86.2419,9904608 +348,HU,HUN,348,Hungary,47.1625,19.5033,9660350 +352,IS,ISL,352,Iceland,64.9631,-19.0208,341250 +356,IN,IND,356,India,20.593684,78.96288,1380004385 +360,ID,IDN,360,Indonesia,-0.7893,113.9213,273523621 +364,IR,IRN,364,Iran,32.427908,53.688046,83992953 +368,IQ,IRQ,368,Iraq,33.223191,43.679291,40222503 +372,IE,IRL,372,Ireland,53.1424,-7.6921,4937796 +376,IL,ISR,376,Israel,31.046051,34.851612,8655541 +380,IT,ITA,380,Italy,41.87194,12.56738,60461828 +388,JM,JAM,388,Jamaica,18.1096,-77.2975,2961161 +392,JP,JPN,392,Japan,36.204824,138.252924,126476458 +400,JO,JOR,400,Jordan,31.24,36.51,10203140 +398,KZ,KAZ,398,Kazakhstan,48.0196,66.9237,18776707 +404,KE,KEN,404,Kenya,-0.0236,37.9062,53771300 
+296,KI,KIR,296,Kiribati,-3.3704,-168.734,117606 +408,KP,PRK,408,"Korea, North",40.3399,127.5101,25778815 +410,KR,KOR,410,"Korea, South",35.907757,127.766922,51269183 +383,XK,XKS,383,Kosovo,42.602636,20.902977,1810366 +414,KW,KWT,414,Kuwait,29.31166,47.481766,4270563 +417,KG,KGZ,417,Kyrgyzstan,41.20438,74.766098,6524191 +418,LA,LAO,418,Laos,19.85627,102.495496,7275556 +428,LV,LVA,428,Latvia,56.8796,24.6032,1886202 +422,LB,LBN,422,Lebanon,33.8547,35.8623,6825442 +426,LS,LSO,426,Lesotho,-29.61,28.2336,2142252 +430,LR,LBR,430,Liberia,6.428055,-9.429499,5057677 +434,LY,LBY,434,Libya,26.3351,17.228331,6871287 +438,LI,LIE,438,Liechtenstein,47.14,9.55,38137 +440,LT,LTU,440,Lithuania,55.1694,23.8813,2722291 +442,LU,LUX,442,Luxembourg,49.8153,6.1296,625976 +450,MG,MDG,450,Madagascar,-18.766947,46.869107,27691019 +454,MW,MWI,454,Malawi,-13.2543,34.3015,19129955 +458,MY,MYS,458,Malaysia,4.210484,101.975766,32365998 +462,MV,MDV,462,Maldives,3.2028,73.2207,540542 +466,ML,MLI,466,Mali,17.570692,-3.996166,20250834 +470,MT,MLT,470,Malta,35.9375,14.3754,441539 +584,MH,MHL,584,Marshall Islands,7.1315,171.1845,58413 +478,MR,MRT,478,Mauritania,21.0079,-10.9408,4649660 +480,MU,MUS,480,Mauritius,-20.348404,57.552152,1271767 +484,MX,MEX,484,Mexico,23.6345,-102.5528,127792286 +583,FM,FSM,583,Micronesia,7.4256,150.5508,113815 +498,MD,MDA,498,Moldova,47.4116,28.3699,4027690 +492,MC,MCO,492,Monaco,43.7333,7.4167,39244 +496,MN,MNG,496,Mongolia,46.8625,103.8467,3278292 +499,ME,MNE,499,Montenegro,42.708678,19.37439,628062 +504,MA,MAR,504,Morocco,31.7917,-7.0926,36910558 +508,MZ,MOZ,508,Mozambique,-18.665695,35.529562,31255435 +516,NA,NAM,516,Namibia,-22.9576,18.4904,2540916 +520,NR,NRU,520,Nauru,-0.5228,166.9315,10834 +524,NP,NPL,524,Nepal,28.1667,84.25,29136808 +528,NL,NLD,528,Netherlands,52.1326,5.2913,17134873 +554,NZ,NZL,554,New Zealand,-40.9006,174.886,4822233 +558,NI,NIC,558,Nicaragua,12.865416,-85.207229,6624554 +562,NE,NER,562,Niger,17.607789,8.081666,24206636 
+566,NG,NGA,566,Nigeria,9.082,8.6753,206139587 +807,MK,MKD,807,North Macedonia,41.6086,21.7453,2083380 +578,NO,NOR,578,Norway,60.472,8.4689,5421242 +512,OM,OMN,512,Oman,21.512583,55.923255,5106622 +586,PK,PAK,586,Pakistan,30.3753,69.3451,220892331 +585,PW,PLW,8,Palau,7.515,134.5825,18008 +591,PA,PAN,591,Panama,8.538,-80.7821,4314768 +598,PG,PNG,598,Papua New Guinea,-6.314993,143.95555,8947027 +600,PY,PRY,600,Paraguay,-23.4425,-58.4438,7132530 +604,PE,PER,604,Peru,-9.19,-75.0152,32971846 +608,PH,PHL,608,Philippines,12.879721,121.774017,109581085 +616,PL,POL,616,Poland,51.9194,19.1451,37846605 +620,PT,PRT,620,Portugal,39.3999,-8.2245,10196707 +634,QA,QAT,634,Qatar,25.3548,51.1839,2881060 +642,RO,ROU,642,Romania,45.9432,24.9668,19237682 +643,RU,RUS,643,Russia,61.52401,105.318756,145934460 +646,RW,RWA,646,Rwanda,-1.9403,29.8739,12952209 +659,KN,KNA,659,Saint Kitts and Nevis,17.357822,-62.782998,53192 +662,LC,LCA,662,Saint Lucia,13.9094,-60.9789,183629 +670,VC,VCT,670,Saint Vincent and the Grenadines,12.9843,-61.2872,110947 +882,WS,WSM,882,Samoa,-13.759,-172.1046,196130 +674,SM,SMR,674,San Marino,43.9424,12.4578,33938 +678,ST,STP,678,Sao Tome and Principe,0.1864,6.6131,219161 +682,SA,SAU,682,Saudi Arabia,23.885942,45.079162,34813867 +686,SN,SEN,686,Senegal,14.4974,-14.4524,16743930 +688,RS,SRB,688,Serbia,44.0165,21.0059,8737370 +690,SC,SYC,690,Seychelles,-4.6796,55.492,98340 +694,SL,SLE,694,Sierra Leone,8.460555,-11.779889,7976985 +702,SG,SGP,702,Singapore,1.2833,103.8333,5850343 +703,SK,SVK,703,Slovakia,48.669,19.699,5434712 +705,SI,SVN,705,Slovenia,46.1512,14.9955,2078932 +90,SB,SLB,90,Solomon Islands,-9.6457,160.1562,652858 +706,SO,SOM,706,Somalia,5.152149,46.199616,15893219 +710,ZA,ZAF,710,South Africa,-30.5595,22.9375,59308690 +728,SS,SSD,728,South Sudan,6.877,31.307,11193729 +724,ES,ESP,724,Spain,40.463667,-3.74922,46754783 +144,LK,LKA,144,Sri Lanka,7.873054,80.771797,21413250 +729,SD,SDN,729,Sudan,12.8628,30.2176,43849269 
+740,SR,SUR,740,Suriname,3.9193,-56.0278,586634 +752,SE,SWE,752,Sweden,60.128161,18.643501,10099270 +756,CH,CHE,756,Switzerland,46.8182,8.2275,8654618 +760,SY,SYR,760,Syria,34.802075,38.996815,17500657 +158,TW,TWN,158,Taiwan*,23.7,121,23816775 +762,TJ,TJK,762,Tajikistan,38.861,71.2761,9537642 +834,TZ,TZA,834,Tanzania,-6.369028,34.888822,59734213 +764,TH,THA,764,Thailand,15.870032,100.992541,69799978 +626,TL,TLS,626,Timor-Leste,-8.874217,125.727539,1318442 +768,TG,TGO,768,Togo,8.6195,0.8248,8278737 +776,TO,TON,776,Tonga,-21.179,-175.1982,105697 +780,TT,TTO,780,Trinidad and Tobago,10.6918,-61.2225,1399491 +788,TN,TUN,788,Tunisia,33.886917,9.537499,11818618 +792,TR,TUR,792,Turkey,38.9637,35.2433,84339067 +798,TV,TUV,798,Tuvalu,-7.1095,177.6493,11792 +800,UG,UGA,800,Uganda,1.373333,32.290275,45741000 +804,UA,UKR,804,Ukraine,48.3794,31.1656,43733759 +784,AE,ARE,784,United Arab Emirates,23.424076,53.847818,9890400 +826,GB,GBR,826,United Kingdom,55.3781,-3.436,67886004 +858,UY,URY,858,Uruguay,-32.5228,-55.7658,3473727 +860,UZ,UZB,860,Uzbekistan,41.377491,64.585262,33469199 +548,VU,VUT,548,Vanuatu,-15.3767,166.9592,292680 +862,VE,VEN,862,Venezuela,6.4238,-66.5897,28435943 +704,VN,VNM,704,Vietnam,14.058324,108.277199,97338583 +275,PS,PSE,275,West Bank and Gaza,31.9522,35.2332,5101416 +732,EH,ESH,732,Western Sahara,24.2155,-12.8858,597330 +887,YE,YEM,887,Yemen,15.552727,48.516388,29825968 +894,ZM,ZMB,894,Zambia,-13.133897,27.849332,18383956 +716,ZW,ZWE,716,Zimbabwe,-19.015438,29.154857,14862927 +36,AU,AUS,36,Australia,-25,133,25459700 +124,CA,CAN,124,Canada,60,-95,38246108 +156,CN,CHN,156,China,35.8617,104.19545,1411778724 +840,US,USA,840,US,40,-100,329466283 diff --git a/dotnet/samples/LearnResources/Resources/WomensSuffrage.txt b/dotnet/samples/LearnResources/Resources/WomensSuffrage.txt new file mode 100644 index 000000000000..3100274682f2 --- /dev/null +++ b/dotnet/samples/LearnResources/Resources/WomensSuffrage.txt @@ -0,0 +1,9 @@ +Women's suffrage is when women got the 
right to vote. A long time ago, only men could vote and make decisions. This was not fair because women should have the same rights as men. Women wanted to vote too, so they started asking for it. It took a long time, and they had to work very hard to make people listen to them. Many men did not think women should vote, and this made it very hard for the women. + +The women who fought for voting were called suffragets. They did many things to show they wanted the right to vote. Some gave speeches, others made signs and marched in the streets. Some even went to jail because they refused to stop fighting for what they believed was right. It was scary for some of the women, but they knew how important it was to keep trying. They wanted to change the world so that it was more fair for everyone. + +One of the most important suffragets was Susan B. Anthony. She worked very hard to help women get the right to vote. She gave speeches and wrote letters to the goverment to make them change the laws. Susan never gave up, even when people said mean things to her. Another important person was Elizabeth Cady Stanton. She also helped fight for women's rights and was friends with Susan B. Anthony. Together, they made a great team and helped make big changes. + +Finally, in 1920, the 19th amendment was passed in the United States. This law gave women the right to vote. It was a huge victory for the suffragets, and they were very happy. Many women went to vote for the first time, and it felt like they were finally equal with men. It took many years and a lot of hard work, but the women never gave up. They kept fighting until they won. + +Women's suffrage is very important because it shows that if you work hard and believe in something, you can make a change. The women who fought for the right to vote showed bravery and strengh, and they helped make the world a better place. Today, women can vote because of them, and it's important to remember their hard work. 
We should always stand up for what is right, just like the suffragets did. diff --git a/dotnet/samples/README.md b/dotnet/samples/README.md index 7fc1771758bb..d890526adfee 100644 --- a/dotnet/samples/README.md +++ b/dotnet/samples/README.md @@ -1,9 +1,10 @@ ## Semantic Kernel Samples -| Type | Description | -| ------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------- | -| [`GettingStarted`](./GettingStarted/README.md) | Take this step by step tutorial to get started with the Semantic Kernel and get introduced to the key concepts. | -| [`GettingStartedWithAgents`](./GettingStartedWithAgents/README.md) | Take this step by step tutorial to get started with the Semantic Kernel Agents and get introduced to the key concepts. | -| [`Concepts`](./Concepts/README.md) | This section contains focussed samples which illustrate all of the concepts included in the Semantic Kernel. | -| [`Demos`](./Demos/README.md) | Look here to find a sample which demonstrate how to use many of Semantic Kernel features. | -| [`LearnResources`](./LearnResources/README.md) | Code snippets that are related to online documentation sources like Microsoft Learn, DevBlogs and others | +| Type | Description | +| ------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------- | +| [`GettingStarted`](./GettingStarted/README.md) | Take this step by step tutorial to get started with the Semantic Kernel and get introduced to the key concepts. | +| [`GettingStartedWithAgents`](./GettingStartedWithAgents/README.md) | Take this step by step tutorial to get started with the Semantic Kernel Agents and get introduced to the key concepts. 
| +| [`GettingStartedWithProcesses`](./GettingStartedWithProcesses/README.md) | Take this step by step tutorial to get started with the Semantic Kernel Processes and get introduced to the key concepts. | +| [`Concepts`](./Concepts/README.md) | This section contains focused samples which illustrate all of the concepts included in the Semantic Kernel. | +| [`Demos`](./Demos/README.md) | Look here to find a sample which demonstrate how to use many of Semantic Kernel features. | +| [`LearnResources`](./LearnResources/README.md) | Code snippets that are related to online documentation sources like Microsoft Learn, DevBlogs and others | diff --git a/dotnet/src/Agents/Abstractions/AgentChannel.cs b/dotnet/src/Agents/Abstractions/AgentChannel.cs index 34f7a8030896..9eee5c336085 100644 --- a/dotnet/src/Agents/Abstractions/AgentChannel.cs +++ b/dotnet/src/Agents/Abstractions/AgentChannel.cs @@ -40,10 +40,26 @@ public abstract class AgentChannel /// The agent actively interacting with the chat. /// The to monitor for cancellation requests. The default is . /// Asynchronous enumeration of messages. + /// + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( Agent agent, CancellationToken cancellationToken = default); + /// + /// Perform a discrete incremental interaction between a single and with streaming results. + /// + /// The agent actively interacting with the chat. + /// The receiver for the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of streaming messages. 
+ protected internal abstract IAsyncEnumerable InvokeStreamingAsync( + Agent agent, + IList messages, + CancellationToken cancellationToken = default); + /// /// Retrieve the message history specific to this channel. /// @@ -68,6 +84,10 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent /// The agent actively interacting with the chat. /// The to monitor for cancellation requests. The default is . /// Asynchronous enumeration of messages. + /// + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( TAgent agent, CancellationToken cancellationToken = default); @@ -84,4 +104,33 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent return this.InvokeAsync((TAgent)agent, cancellationToken); } + /// + /// Process a discrete incremental interaction between a single an a . + /// + /// The agent actively interacting with the chat. + /// The receiver for the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. 
+ /// + protected internal abstract IAsyncEnumerable InvokeStreamingAsync( + TAgent agent, + IList messages, + CancellationToken cancellationToken = default); + + /// + protected internal override IAsyncEnumerable InvokeStreamingAsync( + Agent agent, + IList messages, + CancellationToken cancellationToken = default) + { + if (agent.GetType() != typeof(TAgent)) + { + throw new KernelException($"Invalid agent channel: {typeof(TAgent).Name}/{agent.GetType().Name}"); + } + + return this.InvokeStreamingAsync((TAgent)agent, messages, cancellationToken); + } } diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs index cdc46024ece7..7f2d09af6569 100644 --- a/dotnet/src/Agents/Abstractions/AgentChat.cs +++ b/dotnet/src/Agents/Abstractions/AgentChat.cs @@ -56,6 +56,13 @@ public abstract class AgentChat /// Asynchronous enumeration of messages. public abstract IAsyncEnumerable InvokeAsync(CancellationToken cancellationToken = default); + /// + /// Process a series of interactions between the agents participating in this chat. + /// + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public abstract IAsyncEnumerable InvokeStreamingAsync(CancellationToken cancellationToken = default); + /// /// Retrieve the chat history. /// @@ -205,7 +212,7 @@ protected async IAsyncEnumerable InvokeAgentAsync( { // Get or create the required channel and block until channel is synchronized. // Will throw exception when propagating a processing failure. 
- AgentChannel channel = await GetOrCreateChannelAsync().ConfigureAwait(false); + AgentChannel channel = await this.GetOrCreateChannelAsync(agent, cancellationToken).ConfigureAwait(false); // Invoke agent & process response List messages = []; @@ -240,29 +247,57 @@ protected async IAsyncEnumerable InvokeAgentAsync( { this.ClearActivitySignal(); // Signal activity hash completed } + } - async Task GetOrCreateChannelAsync() - { - string channelKey = this.GetAgentHash(agent); - AgentChannel? channel = await this.SynchronizeChannelAsync(channelKey, cancellationToken).ConfigureAwait(false); - if (channel is null) - { - this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); + /// + /// Process a discrete incremental interaction between a single an a . + /// + /// The agent actively interacting with the chat. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// Any instance does not support concurrent invocation and + /// will throw exception if concurrent activity is attempted. + /// + protected async IAsyncEnumerable InvokeStreamingAgentAsync( + Agent agent, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.SetActivityOrThrow(); // Disallow concurrent access to chat history - channel = await agent.CreateChannelAsync(cancellationToken).ConfigureAwait(false); + this.Logger.LogAgentChatInvokingAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); - this._agentChannels.Add(channelKey, channel); + try + { + // Get or create the required channel and block until channel is synchronized. + // Will throw exception when propagating a processing failure. 
+ AgentChannel channel = await this.GetOrCreateChannelAsync(agent, cancellationToken).ConfigureAwait(false); - if (this.History.Count > 0) - { - // Sync channel with existing history - await channel.ReceiveAsync(this.History, cancellationToken).ConfigureAwait(false); - } + // Invoke agent & process response + ChatHistory messages = []; - this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); + await foreach (StreamingChatMessageContent streamingContent in channel.InvokeStreamingAsync(agent, messages, cancellationToken).ConfigureAwait(false)) + { + yield return streamingContent; } - return channel; + this.History.AddRange(messages); + + this.Logger.LogAgentChatInvokedStreamingAgentMessages(nameof(InvokeAgentAsync), agent.GetType(), agent.Id, messages); + + // Broadcast message to other channels (in parallel) + // Note: Able to queue messages without synchronizing channels. + var channelRefs = + this._agentChannels + .Where(kvp => kvp.Value != channel) + .Select(kvp => new ChannelReference(kvp.Value, kvp.Key)); + this._broadcastQueue.Enqueue(channelRefs, messages); + + this.Logger.LogAgentChatInvokedAgent(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); + } + finally + { + this.ClearActivitySignal(); // Signal activity hash completed } } @@ -308,7 +343,7 @@ private void ClearActivitySignal() /// The activity signal is used to manage ability and visibility for taking actions based /// on conversation history. /// - private void SetActivityOrThrow() + protected void SetActivityOrThrow() { // Note: Interlocked is the absolute lightest synchronization mechanism available in dotnet. int wasActive = Interlocked.CompareExchange(ref this._isActive, 1, 0); @@ -331,6 +366,30 @@ private string GetAgentHash(Agent agent) return hash; } + private async Task GetOrCreateChannelAsync(Agent agent, CancellationToken cancellationToken) + { + string channelKey = this.GetAgentHash(agent); + AgentChannel? 
channel = await this.SynchronizeChannelAsync(channelKey, cancellationToken).ConfigureAwait(false); + if (channel is null) + { + this.Logger.LogAgentChatCreatingChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); + + channel = await agent.CreateChannelAsync(cancellationToken).ConfigureAwait(false); + + this._agentChannels.Add(channelKey, channel); + + if (this.History.Count > 0) + { + // Sync channel with existing history + await channel.ReceiveAsync(this.History, cancellationToken).ConfigureAwait(false); + } + + this.Logger.LogAgentChatCreatedChannel(nameof(InvokeAgentAsync), agent.GetType(), agent.Id); + } + + return channel; + } + private async Task SynchronizeChannelAsync(string channelKey, CancellationToken cancellationToken) { if (this._agentChannels.TryGetValue(channelKey, out AgentChannel? channel)) diff --git a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs index c7123abf9b71..05adb1e2af04 100644 --- a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs +++ b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System.Collections.Generic; +using System.Linq; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -13,11 +14,13 @@ internal sealed class AggregatorChannel(AgentChat chat) : AgentChannel protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken = default) { return this._chat.GetChatMessagesAsync(cancellationToken); } + /// protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(AggregatorAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) { ChatMessageContent? 
lastMessage = null; @@ -47,6 +50,36 @@ protected internal override IAsyncEnumerable GetHistoryAsync } } + /// + protected internal override async IAsyncEnumerable InvokeStreamingAsync(AggregatorAgent agent, IList messages, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + int messageCount = await this._chat.GetChatMessagesAsync(cancellationToken).CountAsync(cancellationToken).ConfigureAwait(false); + + if (agent.Mode == AggregatorMode.Flat) + { + await foreach (StreamingChatMessageContent message in this._chat.InvokeStreamingAsync(cancellationToken).ConfigureAwait(false)) + { + yield return message; + } + } + + ChatMessageContent[] history = await this._chat.GetChatMessagesAsync(cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); + if (history.Length > messageCount) + { + if (agent.Mode == AggregatorMode.Flat) + { + for (int index = messageCount; index < messages.Count; ++index) + { + messages.Add(history[index]); + } + } + else if (agent.Mode == AggregatorMode.Nested) + { + messages.Add(history[history.Length - 1]); + } + } + } + /// protected internal override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken = default) { diff --git a/dotnet/src/Agents/Abstractions/KernelAgent.cs b/dotnet/src/Agents/Abstractions/KernelAgent.cs index 1df425972495..b54ad05a578c 100644 --- a/dotnet/src/Agents/Abstractions/KernelAgent.cs +++ b/dotnet/src/Agents/Abstractions/KernelAgent.cs @@ -1,4 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; + namespace Microsoft.SemanticKernel.Agents; /// @@ -7,8 +12,20 @@ namespace Microsoft.SemanticKernel.Agents; public abstract class KernelAgent : Agent { /// - /// The instructions of the agent (optional) + /// Arguments for the agent instruction parameters (optional). /// + /// + /// Also includes . + /// + public KernelArguments? 
Arguments { get; init; } + + /// + /// The instructions for the agent (optional) + /// + /// + /// Instructions may be formatted in "semantic-kernel" template format. + /// () + /// public string? Instructions { get; init; } /// @@ -18,4 +35,77 @@ public abstract class KernelAgent : Agent /// Defaults to empty Kernel, but may be overridden. /// public Kernel Kernel { get; init; } = new(); + + /// + /// A prompt-template based on the agent instructions. + /// + protected IPromptTemplate? Template { get; set; } + + /// + /// Format the system instructions for the agent. + /// + /// The containing services, plugins, and other state for use by the agent. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// The formatted system instructions for the agent + protected async Task FormatInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) + { + // If is not set, default instructions may be treated as "semantic-kernel" template. + if (this.Template == null) + { + if (string.IsNullOrWhiteSpace(this.Instructions)) + { + return null; + } + + KernelPromptTemplateFactory templateFactory = new(this.LoggerFactory); + this.Template = templateFactory.Create(new PromptTemplateConfig(this.Instructions!)); + } + + return await this.Template.RenderAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + } + + /// + /// Provide a merged instance of with precedence for override arguments. + /// + /// The override arguments + /// + /// This merge preserves original and parameters. + /// and allows for incremental addition or replacement of specific parameters while also preserving the ability + /// to override the execution settings. + /// + protected KernelArguments? MergeArguments(KernelArguments? arguments) + { + // Avoid merge when default arguments are not set. 
+ if (this.Arguments == null) + { + return arguments; + } + + // Avoid merge when override arguments are not set. + if (arguments == null) + { + return this.Arguments; + } + + // Both instances are not null, merge with precedence for override arguments. + + // Merge execution settings with precedence for override arguments. + Dictionary? settings = + (arguments.ExecutionSettings ?? s_emptySettings) + .Concat(this.Arguments.ExecutionSettings ?? s_emptySettings) + .GroupBy(entry => entry.Key) + .ToDictionary(entry => entry.Key, entry => entry.First().Value); + + // Merge parameters with precedence for override arguments. + Dictionary? parameters = + arguments + .Concat(this.Arguments) + .GroupBy(entry => entry.Key) + .ToDictionary(entry => entry.Key, entry => entry.First().Value); + + return new KernelArguments(parameters, settings); + } + + private static readonly Dictionary s_emptySettings = []; } diff --git a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs index 314d68ce8cd8..ebd9e83b42ce 100644 --- a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs +++ b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using Microsoft.Extensions.Logging; @@ -61,7 +62,7 @@ public static partial void LogAgentChatAddingMessages( [LoggerMessage( EventId = 0, Level = LogLevel.Information, - Message = "[{MethodName}] Adding Messages: {MessageCount}.")] + Message = "[{MethodName}] Added Messages: {MessageCount}.")] public static partial void LogAgentChatAddedMessages( this ILogger logger, string methodName, @@ -94,6 +95,31 @@ public static partial void LogAgentChatInvokedAgentMessage( string agentId, ChatMessageContent message); + /// + /// Logs retrieval of streamed messages. 
+ /// + private static readonly Action s_logAgentChatInvokedStreamingAgentMessages = + LoggerMessage.Define( + logLevel: LogLevel.Debug, + eventId: 0, + "[{MethodName}] Agent message {AgentType}/{AgentId}: {Message}."); + + public static void LogAgentChatInvokedStreamingAgentMessages( + this ILogger logger, + string methodName, + Type agentType, + string agentId, + IList messages) + { + if (logger.IsEnabled(LogLevel.Debug)) + { + foreach (ChatMessageContent message in messages) + { + s_logAgentChatInvokedStreamingAgentMessages(logger, methodName, agentType, agentId, message, null); + } + } + } + /// /// Logs invoked agent (complete). /// diff --git a/dotnet/src/Agents/Core/AgentGroupChat.cs b/dotnet/src/Agents/Core/AgentGroupChat.cs index bff8f90f34b3..3b2a2c9ba788 100644 --- a/dotnet/src/Agents/Core/AgentGroupChat.cs +++ b/dotnet/src/Agents/Core/AgentGroupChat.cs @@ -59,18 +59,44 @@ public void AddAgent(Agent agent) public override async IAsyncEnumerable InvokeAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) { this.EnsureStrategyLoggerAssignment(); + this.EnsureCompletionStatus(); - if (this.IsComplete) + this.Logger.LogAgentGroupChatInvokingAgents(nameof(InvokeAsync), this.Agents); + + for (int index = 0; index < this.ExecutionSettings.TerminationStrategy.MaximumIterations; index++) { - // Throw exception if chat is completed and automatic-reset is not enabled. 
- if (!this.ExecutionSettings.TerminationStrategy.AutomaticReset) + // Identify next agent using strategy + Agent agent = await this.SelectAgentAsync(cancellationToken).ConfigureAwait(false); + + // Invoke agent and process messages along with termination + await foreach (var message in this.InvokeAsync(agent, cancellationToken).ConfigureAwait(false)) { - throw new KernelException("Agent Failure - Chat has completed."); + yield return message; } - this.IsComplete = false; + if (this.IsComplete) + { + break; + } } + this.Logger.LogAgentGroupChatYield(nameof(InvokeAsync), this.IsComplete); + } + + /// + /// Process a series of interactions between the that have joined this . + /// The interactions will proceed according to the and the + /// defined via . + /// In the absence of an , this method will not invoke any agents. + /// Any agent may be explicitly selected by calling . + /// + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of streaming messages. 
+ public override async IAsyncEnumerable InvokeStreamingAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.EnsureStrategyLoggerAssignment(); + this.EnsureCompletionStatus(); + this.Logger.LogAgentGroupChatInvokingAgents(nameof(InvokeAsync), this.Agents); for (int index = 0; index < this.ExecutionSettings.TerminationStrategy.MaximumIterations; index++) @@ -79,7 +105,7 @@ public override async IAsyncEnumerable InvokeAsync([Enumerat Agent agent = await this.SelectAgentAsync(cancellationToken).ConfigureAwait(false); // Invoke agent and process messages along with termination - await foreach (var message in this.InvokeAsync(agent, cancellationToken).ConfigureAwait(false)) + await foreach (var message in this.InvokeStreamingAsync(agent, cancellationToken).ConfigureAwait(false)) { yield return message; } @@ -112,7 +138,36 @@ public async IAsyncEnumerable InvokeAsync( this.AddAgent(agent); - await foreach (var message in base.InvokeAgentAsync(agent, cancellationToken).ConfigureAwait(false)) + await foreach (ChatMessageContent message in base.InvokeAgentAsync(agent, cancellationToken).ConfigureAwait(false)) + { + yield return message; + } + + this.IsComplete = await this.ExecutionSettings.TerminationStrategy.ShouldTerminateAsync(agent, this.History, cancellationToken).ConfigureAwait(false); + + this.Logger.LogAgentGroupChatYield(nameof(InvokeAsync), this.IsComplete); + } + + /// + /// Process a single interaction between a given an a . + /// + /// The agent actively interacting with the chat. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// Specified agent joins the chat. 
+ /// + public async IAsyncEnumerable InvokeStreamingAsync( + Agent agent, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + this.EnsureStrategyLoggerAssignment(); + + this.Logger.LogAgentGroupChatInvokingAgent(nameof(InvokeAsync), agent.GetType(), agent.Id); + + this.AddAgent(agent); + + await foreach (StreamingChatMessageContent message in base.InvokeStreamingAgentAsync(agent, cancellationToken).ConfigureAwait(false)) { yield return message; } @@ -122,6 +177,31 @@ public async IAsyncEnumerable InvokeAsync( this.Logger.LogAgentGroupChatYield(nameof(InvokeAsync), this.IsComplete); } + /// + /// Convenience method to create a for a given strategy without HTML encoding the specified parameters. + /// + /// The prompt template string that defines the prompt. + /// + /// On optional to use when interpreting the . + /// The default factory will be used when none is provided. + /// + /// The parameter names to exclude from being HTML encoded. + /// A created via using the specified template. + /// + /// This is particularly targeted to easily avoid encoding the history used by + /// or . + /// + public static KernelFunction CreatePromptFunctionForStrategy(string template, IPromptTemplateFactory? templateFactory = null, params string[] safeParameterNames) + { + PromptTemplateConfig config = + new(template) + { + InputVariables = safeParameterNames.Select(parameterName => new InputVariable { Name = parameterName, AllowDangerouslySetContent = true }).ToList() + }; + + return KernelFunctionFactory.CreateFromPrompt(config, promptTemplateFactory: templateFactory); + } + /// /// Initializes a new instance of the class. /// @@ -146,12 +226,25 @@ private void EnsureStrategyLoggerAssignment() } } + private void EnsureCompletionStatus() + { + if (this.IsComplete) + { + // Throw exception if chat is completed and automatic-reset is not enabled. 
+ if (!this.ExecutionSettings.TerminationStrategy.AutomaticReset) + { + throw new KernelException("Agent Failure - Chat has completed."); + } + + this.IsComplete = false; + } + } + private async Task SelectAgentAsync(CancellationToken cancellationToken) { this.Logger.LogAgentGroupChatSelectingAgent(nameof(InvokeAsync), this.ExecutionSettings.SelectionStrategy.GetType()); Agent agent; - try { agent = await this.ExecutionSettings.SelectionStrategy.NextAsync(this.Agents, this.History, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs index 00ea8c1e2965..ca73ab5ccc8b 100644 --- a/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs +++ b/dotnet/src/Agents/Core/Chat/KernelFunctionSelectionStrategy.cs @@ -2,9 +2,10 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; +using Microsoft.SemanticKernel.Agents.History; +using Microsoft.SemanticKernel.Agents.Internal; namespace Microsoft.SemanticKernel.Agents.Chat; @@ -16,12 +17,12 @@ namespace Microsoft.SemanticKernel.Agents.Chat; public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel kernel) : SelectionStrategy { /// - /// The default value for . + /// The default value for . /// public const string DefaultAgentsVariableName = "_agents_"; /// - /// The default value for . + /// The default value for . /// public const string DefaultHistoryVariableName = "_history_"; @@ -42,20 +43,25 @@ public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel ker /// public KernelArguments? Arguments { get; init; } + /// + /// The used when invoking . + /// + public Kernel Kernel => kernel; + /// /// The invoked as selection criteria. /// public KernelFunction Function { get; } = function; /// - /// When set, will use in the event of a failure to select an agent. 
+ /// Optionally specify a to reduce the history. /// - public bool UseInitialAgentAsFallback { get; init; } + public IChatHistoryReducer? HistoryReducer { get; init; } /// - /// The used when invoking . + /// When set, will use in the event of a failure to select an agent. /// - public Kernel Kernel => kernel; + public bool UseInitialAgentAsFallback { get; init; } /// /// A callback responsible for translating the @@ -66,12 +72,14 @@ public class KernelFunctionSelectionStrategy(KernelFunction function, Kernel ker /// protected sealed override async Task SelectAgentAsync(IReadOnlyList agents, IReadOnlyList history, CancellationToken cancellationToken = default) { + history = await history.ReduceAsync(this.HistoryReducer, cancellationToken).ConfigureAwait(false); + KernelArguments originalArguments = this.Arguments ?? []; KernelArguments arguments = new(originalArguments, originalArguments.ExecutionSettings?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)) { { this.AgentsVariableName, string.Join(",", agents.Select(a => a.Name)) }, - { this.HistoryVariableName, JsonSerializer.Serialize(history) }, // TODO: GitHub Task #5894 + { this.HistoryVariableName, ChatMessageForPrompt.Format(history) }, }; this.Logger.LogKernelFunctionSelectionStrategyInvokingFunction(nameof(NextAsync), this.Function.PluginName, this.Function.Name); diff --git a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs index e86cf9b5a09f..622366bc768d 100644 --- a/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs +++ b/dotnet/src/Agents/Core/Chat/KernelFunctionTerminationStrategy.cs @@ -2,9 +2,10 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Text.Json; using System.Threading; using System.Threading.Tasks; +using Microsoft.SemanticKernel.Agents.History; +using Microsoft.SemanticKernel.Agents.Internal; namespace Microsoft.SemanticKernel.Agents.Chat; @@ -43,14 +44,14 @@ 
public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel k public KernelArguments? Arguments { get; init; } /// - /// The invoked as termination criteria. + /// The used when invoking . /// - public KernelFunction Function { get; } = function; + public Kernel Kernel => kernel; /// - /// The used when invoking . + /// The invoked as termination criteria. /// - public Kernel Kernel => kernel; + public KernelFunction Function { get; } = function; /// /// A callback responsible for translating the @@ -58,15 +59,22 @@ public class KernelFunctionTerminationStrategy(KernelFunction function, Kernel k /// public Func ResultParser { get; init; } = (_) => true; + /// + /// Optionally specify a to reduce the history. + /// + public IChatHistoryReducer? HistoryReducer { get; init; } + /// protected sealed override async Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken = default) { + history = await history.ReduceAsync(this.HistoryReducer, cancellationToken).ConfigureAwait(false); + KernelArguments originalArguments = this.Arguments ?? []; KernelArguments arguments = new(originalArguments, originalArguments.ExecutionSettings?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)) { { this.AgentVariableName, agent.Name ?? agent.Id }, - { this.HistoryVariableName, JsonSerializer.Serialize(history) }, // TODO: GitHub Task #5894 + { this.HistoryVariableName, ChatMessageForPrompt.Format(history) }, }; this.Logger.LogKernelFunctionTerminationStrategyInvokingFunction(nameof(ShouldAgentTerminateAsync), this.Function.PluginName, this.Function.Name); diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs index 212c56038484..cb814969600f 100644 --- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs +++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System.Collections.Generic; using System.Runtime.CompilerServices; +using System.Text; using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel.ChatCompletion; @@ -12,11 +13,35 @@ namespace Microsoft.SemanticKernel.Agents; /// A specialization based on . /// /// -/// NOTE: Enable OpenAIPromptExecutionSettings.ToolCallBehavior for agent plugins. -/// () +/// NOTE: Enable for agent plugins. +/// () /// public sealed class ChatCompletionAgent : ChatHistoryKernelAgent { + /// + /// Initializes a new instance of the class. + /// + public ChatCompletionAgent() { } + + /// + /// Initializes a new instance of the class from + /// a . + /// + /// Prompt template configuration + /// An optional factory to produce the for the agent + /// + /// When 'templateFactory' parameter is not provided, the default is used. + /// + public ChatCompletionAgent( + PromptTemplateConfig templateConfig, + IPromptTemplateFactory? templateFactory = null) + { + this.Name = templateConfig.Name; + this.Description = templateConfig.Description; + this.Instructions = templateConfig.Template; + this.Template = templateFactory?.Create(templateConfig); + } + /// public override async IAsyncEnumerable InvokeAsync( ChatHistory history, @@ -25,11 +50,11 @@ public override async IAsyncEnumerable InvokeAsync( [EnumeratorCancellation] CancellationToken cancellationToken = default) { kernel ??= this.Kernel; - arguments ??= this.Arguments; + arguments = this.MergeArguments(arguments); - (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments); + (IChatCompletionService chatCompletionService, PromptExecutionSettings? 
executionSettings) = GetChatCompletionService(kernel, arguments); - ChatHistory chat = this.SetupAgentChatHistory(history); + ChatHistory chat = await this.SetupAgentChatHistoryAsync(history, arguments, kernel, cancellationToken).ConfigureAwait(false); int messageCount = chat.Count; @@ -54,7 +79,7 @@ await chatCompletionService.GetChatMessageContentsAsync( history.Add(message); } - foreach (ChatMessageContent message in messages ?? []) + foreach (ChatMessageContent message in messages) { message.AuthorName = this.Name; @@ -70,11 +95,11 @@ public override async IAsyncEnumerable InvokeStream [EnumeratorCancellation] CancellationToken cancellationToken = default) { kernel ??= this.Kernel; - arguments ??= this.Arguments; + arguments = this.MergeArguments(arguments); - (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments); + (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments); - ChatHistory chat = this.SetupAgentChatHistory(history); + ChatHistory chat = await this.SetupAgentChatHistoryAsync(history, arguments, kernel, cancellationToken).ConfigureAwait(false); int messageCount = chat.Count; @@ -89,10 +114,16 @@ public override async IAsyncEnumerable InvokeStream this.Logger.LogAgentChatServiceInvokedStreamingAgent(nameof(InvokeAsync), this.Id, chatCompletionService.GetType()); + AuthorRole? role = null; + StringBuilder builder = new(); await foreach (StreamingChatMessageContent message in messages.ConfigureAwait(false)) { + role = message.Role; + message.Role ??= AuthorRole.Assistant; message.AuthorName = this.Name; + builder.Append(message.ToString()); + yield return message; } @@ -105,9 +136,15 @@ public override async IAsyncEnumerable InvokeStream history.Add(message); } + + // Do not duplicate terminated function result to history + if (role != AuthorRole.Tool) + { + history.Add(new(role ?? 
AuthorRole.Assistant, builder.ToString()) { AuthorName = this.Name }); + } } - private (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments) + internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments) { // Need to provide a KernelFunction to the service selector as a container for the execution-settings. KernelFunction nullPrompt = KernelFunctionFactory.CreateFromPrompt("placeholder", arguments?.ExecutionSettings?.Values); @@ -120,13 +157,19 @@ public override async IAsyncEnumerable InvokeStream return (chatCompletionService, executionSettings); } - private ChatHistory SetupAgentChatHistory(IReadOnlyList history) + private async Task SetupAgentChatHistoryAsync( + IReadOnlyList history, + KernelArguments? arguments, + Kernel kernel, + CancellationToken cancellationToken) { ChatHistory chat = []; - if (!string.IsNullOrWhiteSpace(this.Instructions)) + string? 
instructions = await this.FormatInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + if (!string.IsNullOrWhiteSpace(instructions)) { - chat.Add(new ChatMessageContent(AuthorRole.System, this.Instructions) { AuthorName = this.Name }); + chat.Add(new ChatMessageContent(AuthorRole.System, instructions) { AuthorName = this.Name }); } chat.AddRange(history); diff --git a/dotnet/src/Agents/Core/ChatHistoryChannel.cs b/dotnet/src/Agents/Core/ChatHistoryChannel.cs index 0ff06a39b222..29eb89a447a7 100644 --- a/dotnet/src/Agents/Core/ChatHistoryChannel.cs +++ b/dotnet/src/Agents/Core/ChatHistoryChannel.cs @@ -76,6 +76,30 @@ bool IsMessageVisible(ChatMessageContent message) => messageQueue.Count == 0); } + /// + protected override async IAsyncEnumerable InvokeStreamingAsync(Agent agent, IList messages, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (agent is not ChatHistoryKernelAgent historyAgent) + { + throw new KernelException($"Invalid channel binding for agent: {agent.Id} ({agent.GetType().FullName})"); + } + + // Pre-process history reduction. 
+ await historyAgent.ReduceAsync(this._history, cancellationToken).ConfigureAwait(false); + + int messageCount = this._history.Count; + + await foreach (StreamingChatMessageContent streamingMessage in historyAgent.InvokeStreamingAsync(this._history, null, null, cancellationToken).ConfigureAwait(false)) + { + yield return streamingMessage; + } + + for (int index = messageCount; index < this._history.Count; ++index) + { + messages.Add(this._history[index]); + } + } + /// protected override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken) { diff --git a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs index b14363c4bb44..0eee62920027 100644 --- a/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs +++ b/dotnet/src/Agents/Core/ChatHistoryKernelAgent.cs @@ -13,27 +13,42 @@ namespace Microsoft.SemanticKernel.Agents; /// A specialization bound to a . /// /// -/// NOTE: Enable OpenAIPromptExecutionSettings.ToolCallBehavior for agent plugins. -/// () +/// NOTE: Enable for agent plugins. +/// () /// public abstract class ChatHistoryKernelAgent : KernelAgent { /// - /// Optional arguments for the agent. + /// Optionally specify a to reduce the history. /// - public KernelArguments? Arguments { get; init; } - - /// + /// + /// This is automatically applied to the history before invoking the agent, only when using + /// an . It must be explicitly applied via . + /// public IChatHistoryReducer? HistoryReducer { get; init; } - /// + /// + /// Invoke the assistant to respond to the provided history. + /// + /// The conversation history. + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. public abstract IAsyncEnumerable InvokeAsync( ChatHistory history, KernelArguments? 
arguments = null, Kernel? kernel = null, CancellationToken cancellationToken = default); - /// + /// + /// Invoke the assistant to respond to the provided history with streaming response. + /// + /// The conversation history. + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. public abstract IAsyncEnumerable InvokeStreamingAsync( ChatHistory history, KernelArguments? arguments = null, @@ -45,7 +60,7 @@ public abstract IAsyncEnumerable InvokeStreamingAsy ///
/// The source history /// The to monitor for cancellation requests. The default is . - /// + /// True if reduction has occurred. public Task ReduceAsync(ChatHistory history, CancellationToken cancellationToken = default) => history.ReduceAsync(this.HistoryReducer, cancellationToken); diff --git a/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs b/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs index c884846baafa..f7b243e99013 100644 --- a/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs +++ b/dotnet/src/Agents/Core/History/ChatHistoryReducerExtensions.cs @@ -163,4 +163,21 @@ public static async Task ReduceAsync(this ChatHistory history, IChatHistor return true; } + + /// + /// Reduce the history using the provided reducer without mutating the source history. + /// + /// The source history + /// The target reducer + /// The to monitor for cancellation requests. The default is . + public static async Task> ReduceAsync(this IReadOnlyList history, IChatHistoryReducer? reducer, CancellationToken cancellationToken) + { + if (reducer != null) + { + IEnumerable? reducedHistory = await reducer.ReduceAsync(history, cancellationToken).ConfigureAwait(false); + history = reducedHistory?.ToArray() ?? history; + } + + return history; + } } diff --git a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs index a45bfa57011d..67720ab45112 100644 --- a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs +++ b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs @@ -29,7 +29,7 @@ public class ChatHistorySummarizationReducer : IChatHistoryReducer ///
public const string DefaultSummarizationPrompt = """ - Provide a concise and complete summarizion of the entire dialog that does not exceed 5 sentences + Provide a concise and complete summarization of the entire dialog that does not exceed 5 sentences This summary must always: - Consider both user and assistant interactions @@ -80,7 +80,7 @@ Provide a concise and complete summarizion of the entire dialog that does not ex IEnumerable summarizedHistory = history.Extract( this.UseSingleSummary ? 0 : insertionPoint, - truncationIndex, + truncationIndex - 1, (m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent)); try @@ -154,7 +154,9 @@ public override bool Equals(object? obj) ChatHistorySummarizationReducer? other = obj as ChatHistorySummarizationReducer; return other != null && this._thresholdCount == other._thresholdCount && - this._targetCount == other._targetCount; + this._targetCount == other._targetCount && + this.UseSingleSummary == other.UseSingleSummary && + string.Equals(this.SummarizationInstructions, other.SummarizationInstructions, StringComparison.Ordinal); } /// diff --git a/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs new file mode 100644 index 000000000000..2ec91664ce4b --- /dev/null +++ b/dotnet/src/Agents/Core/Internal/ChatMessageForPrompt.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.Internal; + +/// +/// Present a for serialization without metadata. +/// +/// The referenced message +internal sealed class ChatMessageForPrompt(ChatMessageContent message) +{ + private static readonly JsonSerializerOptions s_jsonOptions = new() { WriteIndented = true }; + + /// + /// The string representation of the property. 
+ /// + public string Role => message.Role.Label; + + /// + /// The referenced property. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Name => message.AuthorName; + + /// + /// The referenced property. + /// + public string Content => message.Content ?? string.Empty; + + /// + /// Convenience method to reference a set of messages. + /// + public static IEnumerable Prepare(IEnumerable messages) => + messages.Where(m => !string.IsNullOrWhiteSpace(m.Content)).Select(m => new ChatMessageForPrompt(m)); + + /// + /// Convenience method to format a set of messages for use in a prompt. + /// + public static string Format(IEnumerable messages) => + JsonSerializer.Serialize(Prepare(messages).ToArray(), s_jsonOptions); +} diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj index 222ea5c5be88..a5a4cde76d6f 100644 --- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj +++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj @@ -19,6 +19,7 @@ + @@ -32,7 +33,7 @@ - + diff --git a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs deleted file mode 100644 index cfc7a905cfc7..000000000000 --- a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs +++ /dev/null @@ -1,483 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI.Assistants; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Agents.OpenAI; - -/// -/// Actions associated with an Open Assistant thread. 
-/// -internal static class AssistantThreadActions -{ - private const string FunctionDelimiter = "-"; - - private static readonly HashSet s_pollingStatuses = - [ - RunStatus.Queued, - RunStatus.InProgress, - RunStatus.Cancelling, - ]; - - private static readonly HashSet s_terminalStatuses = - [ - RunStatus.Expired, - RunStatus.Failed, - RunStatus.Cancelled, - ]; - - /// - /// Create a message in the specified thread. - /// - /// The assistant client - /// The thread identifier - /// The message to add - /// The to monitor for cancellation requests. The default is . - /// if a system message is present, without taking any other action - public static async Task CreateMessageAsync(AssistantsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) - { - if (message.Items.Any(i => i is FunctionCallContent)) - { - return; - } - - await client.CreateMessageAsync( - threadId, - message.Role.ToMessageRole(), - message.Content, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - - /// - /// Retrieves the thread messages. - /// - /// The assistant client - /// The thread identifier - /// The to monitor for cancellation requests. The default is . - /// Asynchronous enumeration of messages. - public static async IAsyncEnumerable GetMessagesAsync(AssistantsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) - { - Dictionary agentNames = []; // Cache agent names by their identifier - - PageableList messages; - - string? lastId = null; - do - { - messages = await client.GetMessagesAsync(threadId, limit: 100, ListSortOrder.Descending, after: lastId, null, cancellationToken).ConfigureAwait(false); - foreach (ThreadMessage message in messages) - { - string? 
assistantName = null; - if (!string.IsNullOrWhiteSpace(message.AssistantId) && - !agentNames.TryGetValue(message.AssistantId, out assistantName)) - { - Assistant assistant = await client.GetAssistantAsync(message.AssistantId, cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(assistant.Name)) - { - agentNames.Add(assistant.Id, assistant.Name); - } - } - - assistantName ??= message.AssistantId; - - ChatMessageContent content = GenerateMessageContent(assistantName, message); - - if (content.Items.Count > 0) - { - yield return content; - } - - lastId = message.Id; - } - } - while (messages.HasMore); - } - - /// - /// Invoke the assistant on the specified thread. - /// - /// The assistant agent to interact with the thread. - /// The assistant client - /// The thread identifier - /// Config to utilize when polling for run state. - /// The logger to utilize (might be agent or channel scoped) - /// The plugins and other state. - /// Optional arguments to pass to the agents's invocation, including any . - /// The to monitor for cancellation requests. The default is . - /// Asynchronous enumeration of messages. - /// - /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. - /// - public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( - OpenAIAssistantAgent agent, - AssistantsClient client, - string threadId, - OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration, - ILogger logger, - Kernel kernel, - KernelArguments? arguments, - [EnumeratorCancellation] CancellationToken cancellationToken) - { - if (agent.IsDeleted) - { - throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}."); - } - - ToolDefinition[]? tools = [.. agent.Tools, .. 
kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name, FunctionDelimiter)))]; - - logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); - - CreateRunOptions options = - new(agent.Id) - { - OverrideInstructions = agent.Instructions, - OverrideTools = tools, - }; - - // Create run - ThreadRun run = await client.CreateRunAsync(threadId, options, cancellationToken).ConfigureAwait(false); - - logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); - - // Evaluate status and process steps and messages, as encountered. - HashSet processedStepIds = []; - Dictionary functionSteps = []; - - do - { - // Poll run and steps until actionable - PageableList steps = await PollRunStatusAsync().ConfigureAwait(false); - - // Is in terminal state? - if (s_terminalStatuses.Contains(run.Status)) - { - throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); - } - - // Is tool action required? - if (run.Status == RunStatus.RequiresAction) - { - logger.LogOpenAIAssistantProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId); - - // Execute functions in parallel and post results at once. 
- FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); - if (activeFunctionSteps.Length > 0) - { - // Emit function-call content - yield return (IsVisible: false, Message: GenerateFunctionCallContent(agent.GetName(), activeFunctionSteps)); - - // Invoke functions for each tool-step - IEnumerable> functionResultTasks = ExecuteFunctionSteps(agent, activeFunctionSteps, cancellationToken); - - // Block for function results - FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false); - - // Process tool output - ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); - - await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false); - } - - logger.LogOpenAIAssistantProcessedRunSteps(nameof(InvokeAsync), activeFunctionSteps.Length, run.Id, threadId); - } - - // Enumerate completed messages - logger.LogOpenAIAssistantProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId); - - IEnumerable completedStepsToProcess = - steps - .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id)) - .OrderBy(s => s.CreatedAt); - - int messageCount = 0; - foreach (RunStep completedStep in completedStepsToProcess) - { - if (completedStep.Type.Equals(RunStepType.ToolCalls)) - { - RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails; - - foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls) - { - bool isVisible = false; - ChatMessageContent? 
content = null; - - // Process code-interpreter content - if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter) - { - content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter); - isVisible = true; - } - // Process function result content - else if (toolCall is RunStepFunctionToolCall toolFunction) - { - FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation - content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output); - } - - if (content is not null) - { - ++messageCount; - - yield return (isVisible, Message: content); - } - } - } - else if (completedStep.Type.Equals(RunStepType.MessageCreation)) - { - RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails; - - // Retrieve the message - ThreadMessage? message = await RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false); - - if (message is not null) - { - ChatMessageContent content = GenerateMessageContent(agent.GetName(), message); - - if (content.Items.Count > 0) - { - ++messageCount; - - yield return (IsVisible: true, Message: content); - } - } - } - - processedStepIds.Add(completedStep.Id); - } - - logger.LogOpenAIAssistantProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId); - } - while (RunStatus.Completed != run.Status); - - logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run.Id, threadId); - - // Local function to assist in run polling (participates in method closure). - async Task> PollRunStatusAsync() - { - logger.LogOpenAIAssistantPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId); - - int count = 0; - - do - { - // Reduce polling frequency after a couple attempts - await Task.Delay(count >= 2 ? 
pollingConfiguration.RunPollingInterval : pollingConfiguration.RunPollingBackoff, cancellationToken).ConfigureAwait(false); - ++count; - -#pragma warning disable CA1031 // Do not catch general exception types - try - { - run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false); - } - catch - { - // Retry anyway.. - } -#pragma warning restore CA1031 // Do not catch general exception types - } - while (s_pollingStatuses.Contains(run.Status)); - - logger.LogOpenAIAssistantPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId); - - return await client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - // Local function to capture kernel function state for further processing (participates in method closure). - IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step) - { - if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails) - { - foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType()) - { - var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter); - - KernelArguments functionArguments = []; - if (!string.IsNullOrWhiteSpace(toolCall.Arguments)) - { - Dictionary arguments = JsonSerializer.Deserialize>(toolCall.Arguments)!; - foreach (var argumentKvp in arguments) - { - functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString(); - } - } - - var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments); - - functionSteps.Add(toolCall.Id, content); - - yield return content; - } - } - } - - async Task RetrieveMessageAsync(RunStepMessageCreationDetails detail, CancellationToken cancellationToken) - { - ThreadMessage? 
message = null; - - bool retry = false; - int count = 0; - do - { - try - { - message = await client.GetMessageAsync(threadId, detail.MessageCreation.MessageId, cancellationToken).ConfigureAwait(false); - } - catch (RequestFailedException exception) - { - // Step has provided the message-id. Retry on of NotFound/404 exists. - // Extremely rarely there might be a synchronization issue between the - // assistant response and message-service. - retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3; - } - - if (retry) - { - await Task.Delay(pollingConfiguration.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); - } - - ++count; - } - while (retry); - - return message; - } - } - - private static ChatMessageContent GenerateMessageContent(string? assistantName, ThreadMessage message) - { - AuthorRole role = new(message.Role.ToString()); - - ChatMessageContent content = - new(role, content: null) - { - AuthorName = assistantName, - }; - - foreach (MessageContent itemContent in message.ContentItems) - { - // Process text content - if (itemContent is MessageTextContent contentMessage) - { - content.Items.Add(new TextContent(contentMessage.Text.Trim())); - - foreach (MessageTextAnnotation annotation in contentMessage.Annotations) - { - content.Items.Add(GenerateAnnotationContent(annotation)); - } - } - // Process image content - else if (itemContent is MessageImageFileContent contentImage) - { - content.Items.Add(new FileReferenceContent(contentImage.FileId)); - } - } - - return content; - } - - private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation) - { - string? 
fileId = null; - if (annotation is MessageTextFileCitationAnnotation citationAnnotation) - { - fileId = citationAnnotation.FileId; - } - else if (annotation is MessageTextFilePathAnnotation pathAnnotation) - { - fileId = pathAnnotation.FileId; - } - - return - new() - { - Quote = annotation.Text, - StartIndex = annotation.StartIndex, - EndIndex = annotation.EndIndex, - FileId = fileId, - }; - } - - private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter) - { - return - new ChatMessageContent( - AuthorRole.Assistant, - [ - new TextContent(contentCodeInterpreter.Input) - ]) - { - AuthorName = agentName, - Metadata = new Dictionary { { OpenAIAssistantAgent.CodeInterpreterMetadataKey, true } }, - }; - } - - private static ChatMessageContent GenerateFunctionCallContent(string agentName, FunctionCallContent[] functionSteps) - { - ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null) - { - AuthorName = agentName - }; - - functionCallContent.Items.AddRange(functionSteps); - - return functionCallContent; - } - - private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionCallContent functionStep, string result) - { - ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null) - { - AuthorName = agentName - }; - - functionCallContent.Items.Add( - new FunctionResultContent( - functionStep.FunctionName, - functionStep.PluginName, - functionStep.Id, - result)); - - return functionCallContent; - } - - private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionSteps, CancellationToken cancellationToken) - { - Task[] functionTasks = new Task[functionSteps.Length]; - - for (int index = 0; index < functionSteps.Length; ++index) - { - functionTasks[index] = functionSteps[index].InvokeAsync(agent.Kernel, cancellationToken); - } - - return functionTasks; - } - - private static ToolOutput[] 
GenerateToolOutputs(FunctionResultContent[] functionResults) - { - ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length]; - - for (int index = 0; index < functionResults.Length; ++index) - { - FunctionResultContent functionResult = functionResults[index]; - - object resultValue = functionResult.Result ?? string.Empty; - - if (resultValue is not string textResult) - { - textResult = JsonSerializer.Serialize(resultValue); - } - - toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!); - } - - return toolOutputs; - } -} diff --git a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs index cd4e80c3abf1..895482927515 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs @@ -1,6 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs index 9665fb680498..c4acca58770f 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs @@ -2,7 +2,7 @@ using System; using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI.Assistants; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; @@ -13,9 +13,8 @@ internal static class KernelFunctionExtensions ///
/// The source function /// The plugin name - /// The delimiter character /// An OpenAI tool definition - public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName, string delimiter) + public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName) { var metadata = function.Metadata; if (metadata.Parameters.Count > 0) @@ -47,10 +46,17 @@ public static FunctionToolDefinition ToToolDefinition(this KernelFunction functi required, }; - return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description, BinaryData.FromObjectAsJson(spec)); + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName)) + { + Description = function.Description, + Parameters = BinaryData.FromObjectAsJson(spec) + }; } - return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description); + return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName)) + { + Description = function.Description + }; } private static string ConvertType(Type? 
type) diff --git a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs similarity index 87% rename from dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs rename to dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs index 084e533fe757..d017fb403f23 100644 --- a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs +++ b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs @@ -2,7 +2,7 @@ using Azure.Core; using Azure.Core.Pipeline; -namespace Microsoft.SemanticKernel.Agents.OpenAI.Azure; +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; /// /// Helper class to inject headers into Azure SDK HTTP pipeline diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs new file mode 100644 index 000000000000..532a8433c37c --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantCreationOptionsFactory.cs @@ -0,0 +1,77 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Produce the for an assistant according to the requested configuration. 
+/// +internal static class AssistantCreationOptionsFactory +{ + public static AssistantCreationOptions CreateAssistantOptions(this PromptTemplateConfig templateConfig, OpenAIAssistantCapabilities capabilities) + { + AssistantCreationOptions assistantCreationOptions = capabilities.CreateAssistantCreationOptions(templateConfig.TemplateFormat); + + assistantCreationOptions.Name = templateConfig.Name; + assistantCreationOptions.Instructions = templateConfig.Template; + assistantCreationOptions.Description = templateConfig.Description; + + return assistantCreationOptions; + } + + public static AssistantCreationOptions CreateAssistantOptions(this OpenAIAssistantDefinition definition) + { + AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantCreationOptions(PromptTemplateConfig.SemanticKernelTemplateFormat); + + assistantCreationOptions.Name = definition.Name; + assistantCreationOptions.Instructions = definition.Instructions; + assistantCreationOptions.Description = definition.Description; + + return assistantCreationOptions; + } + + private static AssistantCreationOptions CreateAssistantCreationOptions(this OpenAIAssistantCapabilities definition, string templateFormat) + { + AssistantCreationOptions assistantCreationOptions = + new() + { + ToolResources = + AssistantToolResourcesFactory.GenerateToolResources( + definition.EnableFileSearch ? definition.VectorStoreId : null, + definition.EnableCodeInterpreter ? definition.CodeInterpreterFileIds : null), + ResponseFormat = definition.EnableJsonResponse ? 
AssistantResponseFormat.JsonObject : AssistantResponseFormat.Auto, + Temperature = definition.Temperature, + NucleusSamplingFactor = definition.TopP, + }; + + if (definition.Metadata != null) + { + foreach (KeyValuePair item in definition.Metadata) + { + assistantCreationOptions.Metadata[item.Key] = item.Value; + } + } + + assistantCreationOptions.Metadata[OpenAIAssistantAgent.TemplateMetadataKey] = templateFormat; + + if (definition.ExecutionOptions != null) + { + string optionsJson = JsonSerializer.Serialize(definition.ExecutionOptions); + assistantCreationOptions.Metadata[OpenAIAssistantAgent.OptionsMetadataKey] = optionsJson; + } + + if (definition.EnableCodeInterpreter) + { + assistantCreationOptions.Tools.Add(ToolDefinition.CreateCodeInterpreter()); + } + + if (definition.EnableFileSearch) + { + assistantCreationOptions.Tools.Add(ToolDefinition.CreateFileSearch()); + } + + return assistantCreationOptions; + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs new file mode 100644 index 000000000000..8f1f60f2b936 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs @@ -0,0 +1,64 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating based on . +/// Also able to produce . +/// +/// +/// Improves testability. +/// +internal static class AssistantMessageFactory +{ + /// + /// Produces based on . + /// + /// The message content. + public static MessageCreationOptions CreateOptions(ChatMessageContent message) + { + MessageCreationOptions options = new(); + + if (message.Metadata != null) + { + foreach (var metadata in message.Metadata) + { + options.Metadata.Add(metadata.Key, metadata.Value?.ToString() ?? 
string.Empty); + } + } + + return options; + } + + /// + /// Translates into enumeration of . + /// + /// The message content. + public static IEnumerable GetMessageContents(ChatMessageContent message) + { + foreach (KernelContent content in message.Items) + { + if (content is TextContent textContent) + { + yield return MessageContent.FromText(content.ToString()); + } + else if (content is ImageContent imageContent) + { + if (imageContent.Uri != null) + { + yield return MessageContent.FromImageUri(imageContent.Uri); + } + else if (string.IsNullOrWhiteSpace(imageContent.DataUri)) + { + yield return MessageContent.FromImageUri(new(imageContent.DataUri!)); + } + } + else if (content is FileReferenceContent fileContent) + { + yield return MessageContent.FromImageFileId(fileContent.FileId); + } + } + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs new file mode 100644 index 000000000000..82663f556283 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs @@ -0,0 +1,56 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating definition. +/// +/// +/// Improves testability. +/// +internal static class AssistantRunOptionsFactory +{ + /// + /// Produce by reconciling and . + /// + /// The assistant definition + /// Instructions to use for the run + /// The run specific options + public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, string? overrideInstructions, OpenAIAssistantInvocationOptions? invocationOptions) + { + int? 
truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount); + + RunCreationOptions options = + new() + { + AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions, + InstructionsOverride = overrideInstructions, + MaxOutputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens), + MaxInputTokenCount = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens), + ModelOverride = invocationOptions?.ModelName, + NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP), + AllowParallelToolCalls = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled), + ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null, + Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature), + TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null, + }; + + if (invocationOptions?.Metadata != null) + { + foreach (var metadata in invocationOptions.Metadata) + { + options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty); + } + } + + return options; + } + + private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct + => + setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ? 
+ setting.Value : + null; +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs new file mode 100644 index 000000000000..5bdd2a76fee7 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs @@ -0,0 +1,805 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Actions associated with an Open Assistant thread. +/// +internal static class AssistantThreadActions +{ + private static readonly HashSet s_pollingStatuses = + [ + RunStatus.Queued, + RunStatus.InProgress, + RunStatus.Cancelling, + ]; + + private static readonly HashSet s_terminalStatuses = + [ + RunStatus.Expired, + RunStatus.Failed, + RunStatus.Cancelled, + ]; + + /// + /// Create a new assistant thread. + /// + /// The assistant client + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default) + { + ThreadCreationOptions createOptions = + new() + { + ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds), + }; + + if (options?.Messages is not null) + { + foreach (ChatMessageContent message in options.Messages) + { + ThreadInitializationMessage threadMessage = new( + role: message.Role == AuthorRole.User ? 
MessageRole.User : MessageRole.Assistant, + content: AssistantMessageFactory.GetMessageContents(message)); + + createOptions.InitialMessages.Add(threadMessage); + } + } + + if (options?.Metadata != null) + { + foreach (KeyValuePair item in options.Metadata) + { + createOptions.Metadata[item.Key] = item.Value; + } + } + + AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false); + + return thread.Id; + } + + /// + /// Create a message in the specified thread. + /// + /// The assistant client + /// The thread identifier + /// The message to add + /// The to monitor for cancellation requests. The default is . + /// if a system message is present, without taking any other action + public static async Task CreateMessageAsync(AssistantClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken) + { + if (message.Items.Any(i => i is FunctionCallContent)) + { + return; + } + + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + await client.CreateMessageAsync( + threadId, + message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant, + AssistantMessageFactory.GetMessageContents(message), + options, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Retrieves the thread messages. + /// + /// The assistant client + /// The thread identifier + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public static async IAsyncEnumerable GetMessagesAsync(AssistantClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken) + { + Dictionary agentNames = []; // Cache agent names by their identifier + + await foreach (ThreadMessage message in client.GetMessagesAsync(threadId, new() { Order = MessageCollectionOrder.Descending }, cancellationToken).ConfigureAwait(false)) + { + string? 
assistantName = null; + if (!string.IsNullOrWhiteSpace(message.AssistantId) && + !agentNames.TryGetValue(message.AssistantId, out assistantName)) + { + Assistant assistant = await client.GetAssistantAsync(message.AssistantId, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(assistant.Name)) + { + agentNames.Add(assistant.Id, assistant.Name); + } + } + + assistantName ??= message.AssistantId; + + ChatMessageContent content = GenerateMessageContent(assistantName, message); + + if (content.Items.Count > 0) + { + yield return content; + } + } + } + + /// + /// Invoke the assistant on the specified thread. + /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user. + /// Example of a non-visible message is function-content for functions that are automatically executed. + /// + /// The assistant agent to interact with the thread. + /// The assistant client + /// The thread identifier + /// Options to utilize for the invocation + /// The logger to utilize (might be agent or channel scoped) + /// The plugins and other state. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync( + OpenAIAssistantAgent agent, + AssistantClient client, + string threadId, + OpenAIAssistantInvocationOptions? invocationOptions, + ILogger logger, + Kernel kernel, + KernelArguments? arguments, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + if (agent.IsDeleted) + { + throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}."); + } + + logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); + + ToolDefinition[]? tools = [.. agent.Tools, .. 
kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions); + + options.ToolsOverride.AddRange(tools); + + ThreadRun run = await client.CreateRunAsync(threadId, agent.Id, options, cancellationToken).ConfigureAwait(false); + + logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); + + // Evaluate status and process steps and messages, as encountered. + HashSet processedStepIds = []; + Dictionary functionSteps = []; + + do + { + // Poll run and steps until actionable + await PollRunStatusAsync().ConfigureAwait(false); + + // Is in terminal state? + if (s_terminalStatuses.Contains(run.Status)) + { + throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); + } + + IReadOnlyList steps = await GetRunStepsAsync(client, run, cancellationToken).ConfigureAwait(false); + + // Is tool action required? + if (run.Status == RunStatus.RequiresAction) + { + logger.LogOpenAIAssistantProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId); + + // Execute functions in parallel and post results at once. 
+ FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + if (functionCalls.Length > 0) + { + // Emit function-call content + yield return (IsVisible: false, Message: GenerateFunctionCallContent(agent.GetName(), functionCalls)); + + // Invoke functions for each tool-step + IEnumerable> functionResultTasks = ExecuteFunctionSteps(agent, functionCalls, cancellationToken); + + // Block for function results + FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false); + + // Capture function-call for message processing + foreach (FunctionResultContent functionCall in functionResults) + { + functionSteps.Add(functionCall.CallId!, functionCall); + } + + // Process tool output + ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + + await client.SubmitToolOutputsToRunAsync(threadId, run.Id, toolOutputs, cancellationToken).ConfigureAwait(false); + } + + logger.LogOpenAIAssistantProcessedRunSteps(nameof(InvokeAsync), functionCalls.Length, run.Id, threadId); + } + + // Enumerate completed messages + logger.LogOpenAIAssistantProcessingRunMessages(nameof(InvokeAsync), run.Id, threadId); + + IEnumerable completedStepsToProcess = + steps + .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id)) + .OrderBy(s => s.CreatedAt); + + int messageCount = 0; + foreach (RunStep completedStep in completedStepsToProcess) + { + if (completedStep.Type == RunStepType.ToolCalls) + { + foreach (RunStepToolCall toolCall in completedStep.Details.ToolCalls) + { + bool isVisible = false; + ChatMessageContent? 
content = null; + + // Process code-interpreter content + if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter) + { + content = GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput); + isVisible = true; + } + // Process function result content + else if (toolCall.ToolKind == RunStepToolCallKind.Function) + { + FunctionResultContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation + content = GenerateFunctionResultContent(agent.GetName(), [functionStep]); + } + + if (content is not null) + { + ++messageCount; + + yield return (isVisible, Message: content); + } + } + } + else if (completedStep.Type == RunStepType.MessageCreation) + { + // Retrieve the message + ThreadMessage? message = await RetrieveMessageAsync(client, threadId, completedStep.Details.CreatedMessageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); + + if (message is not null) + { + ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, completedStep); + + if (content.Items.Count > 0) + { + ++messageCount; + + yield return (IsVisible: true, Message: content); + } + } + } + + processedStepIds.Add(completedStep.Id); + } + + logger.LogOpenAIAssistantProcessedRunMessages(nameof(InvokeAsync), messageCount, run.Id, threadId); + } + while (RunStatus.Completed != run.Status); + + logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run.Id, threadId); + + // Local function to assist in run polling (participates in method closure). 
+ async Task PollRunStatusAsync() + { + logger.LogOpenAIAssistantPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId); + + int count = 0; + + do + { + // Reduce polling frequency after a couple attempts + await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false); + ++count; + +#pragma warning disable CA1031 // Do not catch general exception types + try + { + run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false); + } + catch + { + // Retry anyway.. + } +#pragma warning restore CA1031 // Do not catch general exception types + } + while (s_pollingStatuses.Contains(run.Status)); + + logger.LogOpenAIAssistantPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId); + } + } + + /// + /// Invoke the assistant on the specified thread using streaming. + /// + /// The assistant agent to interact with the thread. + /// The assistant client + /// The thread identifier + /// The receiver for the completed messages generated + /// Options to utilize for the invocation + /// The logger to utilize (might be agent or channel scoped) + /// The plugins and other state. + /// Optional arguments to pass to the agents's invocation, including any . + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public static async IAsyncEnumerable InvokeStreamingAsync( + OpenAIAssistantAgent agent, + AssistantClient client, + string threadId, + IList? messages, + OpenAIAssistantInvocationOptions? invocationOptions, + ILogger logger, + Kernel kernel, + KernelArguments? 
arguments, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + if (agent.IsDeleted) + { + throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}."); + } + + logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId); + + ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))]; + + string? instructions = await agent.GetInstructionsAsync(kernel, arguments, cancellationToken).ConfigureAwait(false); + + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, instructions, invocationOptions); + + options.ToolsOverride.AddRange(tools); + + // Evaluate status and process steps and messages, as encountered. + HashSet processedStepIds = []; + Dictionary activeMessages = []; + ThreadRun? run = null; + RunStep? currentStep = null; + + IAsyncEnumerable asyncUpdates = client.CreateRunStreamingAsync(threadId, agent.Id, options, cancellationToken); + do + { + activeMessages.Clear(); + + await foreach (StreamingUpdate update in asyncUpdates.ConfigureAwait(false)) + { + if (update is RunUpdate runUpdate) + { + run = runUpdate.Value; + + switch (runUpdate.UpdateKind) + { + case StreamingUpdateReason.RunCreated: + logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId); + break; + } + } + else if (update is MessageContentUpdate contentUpdate) + { + switch (contentUpdate.UpdateKind) + { + case StreamingUpdateReason.MessageUpdated: + yield return GenerateStreamingMessageContent(agent.GetName(), contentUpdate); + break; + } + } + else if (update is MessageStatusUpdate statusUpdate) + { + switch (statusUpdate.UpdateKind) + { + case StreamingUpdateReason.MessageCompleted: + activeMessages.Add(statusUpdate.Value.Id, currentStep); + break; + } + } + else if (update is RunStepDetailsUpdate detailsUpdate) + { + StreamingChatMessageContent? 
toolContent = GenerateStreamingCodeInterpreterContent(agent.GetName(), detailsUpdate); + if (toolContent != null) + { + yield return toolContent; + } + } + else if (update is RunStepUpdate stepUpdate) + { + switch (stepUpdate.UpdateKind) + { + case StreamingUpdateReason.RunStepCreated: + currentStep = stepUpdate.Value; + break; + case StreamingUpdateReason.RunStepCompleted: + currentStep = null; + break; + default: + break; + } + } + } + + if (run == null) + { + throw new KernelException($"Agent Failure - Run not created for thread: ${threadId}"); + } + + // Is in terminal state? + if (s_terminalStatuses.Contains(run.Status)) + { + throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}"); + } + + if (run.Status == RunStatus.RequiresAction) + { + IReadOnlyList steps = await GetRunStepsAsync(client, run, cancellationToken).ConfigureAwait(false); + + // Execute functions in parallel and post results at once. + FunctionCallContent[] functionCalls = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray(); + if (functionCalls.Length > 0) + { + // Emit function-call content + messages?.Add(GenerateFunctionCallContent(agent.GetName(), functionCalls)); + + // Invoke functions for each tool-step + IEnumerable> functionResultTasks = ExecuteFunctionSteps(agent, functionCalls, cancellationToken); + + // Block for function results + FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false); + + // Process tool output + ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults); + asyncUpdates = client.SubmitToolOutputsToRunStreamingAsync(run.ThreadId, run.Id, toolOutputs, cancellationToken); + + messages?.Add(GenerateFunctionResultContent(agent.GetName(), functionResults)); + } + } + + if (activeMessages.Count > 0) + { + logger.LogOpenAIAssistantProcessingRunMessages(nameof(InvokeAsync), run!.Id, threadId); + + foreach (string messageId in 
activeMessages.Keys) + { + RunStep? step = activeMessages[messageId]; + ThreadMessage? message = await RetrieveMessageAsync(client, threadId, messageId, agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false); + + if (message != null) + { + ChatMessageContent content = GenerateMessageContent(agent.GetName(), message, step); + messages?.Add(content); + } + } + + logger.LogOpenAIAssistantProcessedRunMessages(nameof(InvokeAsync), activeMessages.Count, run!.Id, threadId); + } + } + while (run?.Status != RunStatus.Completed); + + logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run?.Id ?? "Failed", threadId); + } + + private static async Task> GetRunStepsAsync(AssistantClient client, ThreadRun run, CancellationToken cancellationToken) + { + List steps = []; + + await foreach (RunStep step in client.GetRunStepsAsync(run.ThreadId, run.Id, cancellationToken: cancellationToken).ConfigureAwait(false)) + { + steps.Add(step); + } + + return steps; + } + + private static ChatMessageContent GenerateMessageContent(string? assistantName, ThreadMessage message, RunStep? completedStep = null) + { + AuthorRole role = new(message.Role.ToString()); + + Dictionary? metaData = + completedStep != null ? 
+ new Dictionary + { + { nameof(completedStep.CreatedAt), completedStep.CreatedAt }, + { nameof(MessageContentUpdate.MessageId), message.Id }, + { nameof(RunStepDetailsUpdate.StepId), completedStep.Id }, + { nameof(completedStep.RunId), completedStep.RunId }, + { nameof(completedStep.ThreadId), completedStep.ThreadId }, + { nameof(completedStep.AssistantId), completedStep.AssistantId }, + { nameof(completedStep.Usage), completedStep.Usage }, + } : + null; + + ChatMessageContent content = + new(role, content: null) + { + AuthorName = assistantName, + Metadata = metaData, + }; + + foreach (MessageContent itemContent in message.Content) + { + // Process text content + if (!string.IsNullOrEmpty(itemContent.Text)) + { + content.Items.Add(new TextContent(itemContent.Text)); + + foreach (TextAnnotation annotation in itemContent.TextAnnotations) + { + content.Items.Add(GenerateAnnotationContent(annotation)); + } + } + // Process image content + else if (itemContent.ImageFileId != null) + { + content.Items.Add(new FileReferenceContent(itemContent.ImageFileId)); + } + } + + return content; + } + + private static StreamingChatMessageContent GenerateStreamingMessageContent(string? 
assistantName, MessageContentUpdate update) + { + StreamingChatMessageContent content = + new(AuthorRole.Assistant, content: null) + { + AuthorName = assistantName, + }; + + // Process text content + if (!string.IsNullOrEmpty(update.Text)) + { + content.Items.Add(new StreamingTextContent(update.Text)); + } + // Process image content + else if (update.ImageFileId != null) + { + content.Items.Add(new StreamingFileReferenceContent(update.ImageFileId)); + } + // Process annotations + else if (update.TextAnnotation != null) + { + content.Items.Add(GenerateStreamingAnnotationContent(update.TextAnnotation)); + } + + if (update.Role.HasValue && update.Role.Value != MessageRole.User) + { + content.Role = new(update.Role.Value.ToString()); + } + + return content; + } + + private static StreamingChatMessageContent? GenerateStreamingCodeInterpreterContent(string? assistantName, RunStepDetailsUpdate update) + { + StreamingChatMessageContent content = + new(AuthorRole.Assistant, content: null) + { + AuthorName = assistantName, + }; + + // Process text content + if (update.CodeInterpreterInput != null) + { + content.Items.Add(new StreamingTextContent(update.CodeInterpreterInput)); + content.Metadata = new Dictionary { { OpenAIAssistantAgent.CodeInterpreterMetadataKey, true } }; + } + + if ((update.CodeInterpreterOutputs?.Count ?? 0) > 0) + { + foreach (var output in update.CodeInterpreterOutputs!) + { + if (output.ImageFileId != null) + { + content.Items.Add(new StreamingFileReferenceContent(output.ImageFileId)); + } + } + } + + return content.Items.Count > 0 ? content : null; + } + + private static AnnotationContent GenerateAnnotationContent(TextAnnotation annotation) + { + string? 
fileId = null; + + if (!string.IsNullOrEmpty(annotation.OutputFileId)) + { + fileId = annotation.OutputFileId; + } + else if (!string.IsNullOrEmpty(annotation.InputFileId)) + { + fileId = annotation.InputFileId; + } + + return + new(annotation.TextToReplace) + { + Quote = annotation.TextToReplace, + StartIndex = annotation.StartIndex, + EndIndex = annotation.EndIndex, + FileId = fileId, + }; + } + + private static StreamingAnnotationContent GenerateStreamingAnnotationContent(TextAnnotationUpdate annotation) + { + string? fileId = null; + + if (!string.IsNullOrEmpty(annotation.OutputFileId)) + { + fileId = annotation.OutputFileId; + } + else if (!string.IsNullOrEmpty(annotation.InputFileId)) + { + fileId = annotation.InputFileId; + } + + return + new(annotation.TextToReplace) + { + StartIndex = annotation.StartIndex ?? 0, + EndIndex = annotation.EndIndex ?? 0, + FileId = fileId, + }; + } + + private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode) + { + return + new ChatMessageContent( + AuthorRole.Assistant, + [ + new TextContent(pythonCode) + ]) + { + AuthorName = agentName, + Metadata = new Dictionary { { OpenAIAssistantAgent.CodeInterpreterMetadataKey, true } }, + }; + } + + private static IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step) + { + if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls) + { + foreach (RunStepToolCall toolCall in step.Details.ToolCalls) + { + (FunctionName nameParts, KernelArguments functionArguments) = ParseFunctionCall(toolCall.FunctionName, toolCall.FunctionArguments); + + FunctionCallContent content = new(nameParts.Name, nameParts.PluginName, toolCall.ToolCallId, functionArguments); + + yield return content; + } + } + } + + private static (FunctionName functionName, KernelArguments arguments) ParseFunctionCall(string functionName, string? 
functionArguments) + { + FunctionName nameParts = FunctionName.Parse(functionName); + + KernelArguments arguments = []; + + if (!string.IsNullOrWhiteSpace(functionArguments)) + { + foreach (var argumentKvp in JsonSerializer.Deserialize>(functionArguments!)!) + { + arguments[argumentKvp.Key] = argumentKvp.Value.ToString(); + } + } + + return (nameParts, arguments); + } + + private static ChatMessageContent GenerateFunctionCallContent(string agentName, IList functionCalls) + { + ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null) + { + AuthorName = agentName + }; + + functionCallContent.Items.AddRange(functionCalls); + + return functionCallContent; + } + + private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionResultContent[] functionResults) + { + ChatMessageContent functionResultContent = new(AuthorRole.Tool, content: null) + { + AuthorName = agentName + }; + + foreach (FunctionResultContent functionResult in functionResults) + { + functionResultContent.Items.Add( + new FunctionResultContent( + functionResult.FunctionName, + functionResult.PluginName, + functionResult.CallId, + functionResult.Result)); + } + + return functionResultContent; + } + + private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionCalls, CancellationToken cancellationToken) + { + Task[] functionTasks = new Task[functionCalls.Length]; + + for (int index = 0; index < functionCalls.Length; ++index) + { + functionTasks[index] = ExecuteFunctionStep(agent, functionCalls[index], cancellationToken); + } + + return functionTasks; + } + + private static Task ExecuteFunctionStep(OpenAIAssistantAgent agent, FunctionCallContent functionCall, CancellationToken cancellationToken) + { + return functionCall.InvokeAsync(agent.Kernel, cancellationToken); + } + + private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults) + { + ToolOutput[] toolOutputs = new 
ToolOutput[functionResults.Length]; + + for (int index = 0; index < functionResults.Length; ++index) + { + FunctionResultContent functionResult = functionResults[index]; + + object resultValue = functionResult.Result ?? string.Empty; + + if (resultValue is not string textResult) + { + textResult = JsonSerializer.Serialize(resultValue); + } + + toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!); + } + + return toolOutputs; + } + + private static async Task RetrieveMessageAsync(AssistantClient client, string threadId, string messageId, TimeSpan syncDelay, CancellationToken cancellationToken) + { + ThreadMessage? message = null; + + bool retry = false; + int count = 0; + do + { + try + { + message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false); + } + catch (RequestFailedException exception) + { + // Step has provided the message-id. Retry on of NotFound/404 exists. + // Extremely rarely there might be a synchronization issue between the + // assistant response and message-service. + retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3; + } + + if (retry) + { + await Task.Delay(syncDelay, cancellationToken).ConfigureAwait(false); + } + + ++count; + } + while (retry); + + return message; + } +} diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs new file mode 100644 index 000000000000..7c4000dcebb0 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs @@ -0,0 +1,52 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using OpenAI.Assistants; + +namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal; + +/// +/// Factory for creating definition. +/// +/// +/// Improves testability. +/// +internal static class AssistantToolResourcesFactory +{ + /// + /// Produces a definition based on the provided parameters. 
+ /// + /// An optional vector-store-id for the 'file_search' tool + /// An optionallist of file-identifiers for the 'code_interpreter' tool. + public static ToolResources? GenerateToolResources(string? vectorStoreId, IReadOnlyList? codeInterpreterFileIds) + { + bool hasVectorStore = !string.IsNullOrWhiteSpace(vectorStoreId); + bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0; + + ToolResources? toolResources = null; + + if (hasVectorStore || hasCodeInterpreterFiles) + { + FileSearchToolResources? fileSearch = + hasVectorStore ? + new() + { + VectorStoreIds = { vectorStoreId! } + } : + null; + + CodeInterpreterToolResources? codeInterpreter = + hasCodeInterpreterFiles ? + new() : + null; + codeInterpreter?.FileIds.AddRange(codeInterpreterFileIds!); + + toolResources = new ToolResources + { + FileSearch = fileSearch, + CodeInterpreter = codeInterpreter + }; + } + + return toolResources; + } +} diff --git a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs index bc7c8d9919f0..3a39c314c5c3 100644 --- a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs +++ b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs @@ -1,7 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI.Assistants; using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs index 6746c6c50d9a..67b77d7d4374 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs @@ -1,17 +1,16 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Runtime.CompilerServices; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI.Assistants; -using Azure.Core; -using Azure.Core.Pipeline; using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Agents.OpenAI.Azure; -using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; +using OpenAI.Files; namespace Microsoft.SemanticKernel.Agents.OpenAI; @@ -25,153 +24,181 @@ public sealed class OpenAIAssistantAgent : KernelAgent /// public const string CodeInterpreterMetadataKey = "code"; + internal const string OptionsMetadataKey = "__run_options"; + internal const string TemplateMetadataKey = "__template_format"; + + private readonly OpenAIClientProvider _provider; private readonly Assistant _assistant; - private readonly AssistantsClient _client; - private readonly OpenAIAssistantConfiguration _config; + private readonly AssistantClient _client; + private readonly string[] _channelKeys; /// - /// Optional arguments for the agent. + /// The assistant definition. /// - /// - /// This property is not currently used by the agent, but is provided for future extensibility. - /// - public KernelArguments? Arguments { get; init; } + public OpenAIAssistantDefinition Definition { get; private init; } /// - /// A list of previously uploaded file IDs to attach to the assistant. + /// Set when the assistant has been deleted via . + /// An assistant removed by other means will result in an exception when invoked. 
/// - public IReadOnlyList FileIds => this._assistant.FileIds; + public bool IsDeleted { get; private set; } /// - /// A set of up to 16 key/value pairs that can be attached to an agent, used for - /// storing additional information about that object in a structured format.Keys - /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// Defines polling behavior for run processing /// - public IReadOnlyDictionary Metadata => this._assistant.Metadata; + public RunPollingOptions PollingOptions { get; } = new(); /// - /// Expose predefined tools. + /// Expose predefined tools for run-processing. /// internal IReadOnlyList Tools => this._assistant.Tools; /// - /// Set when the assistant has been deleted via . - /// An assistant removed by other means will result in an exception when invoked. + /// Define a new . /// - public bool IsDeleted { get; private set; } + /// OpenAI client provider for accessing the API service. + /// Defines the assistant's capabilities. + /// The containing services, plugins, and other state for use throughout the operation. + /// Required arguments that provide default template parameters, including any . + /// Prompt template configuration + /// An optional factory to produce the for the agent + /// The to monitor for cancellation requests. The default is . + /// An instance + public async static Task CreateFromTemplateAsync( + OpenAIClientProvider clientProvider, + OpenAIAssistantCapabilities capabilities, + Kernel kernel, + KernelArguments defaultArguments, + PromptTemplateConfig templateConfig, + IPromptTemplateFactory? 
templateFactory = null, + CancellationToken cancellationToken = default) + { + // Validate input + Verify.NotNull(kernel, nameof(kernel)); + Verify.NotNull(defaultArguments, nameof(defaultArguments)); + Verify.NotNull(clientProvider, nameof(clientProvider)); + Verify.NotNull(capabilities, nameof(capabilities)); + Verify.NotNull(templateConfig, nameof(templateConfig)); + + // Ensure template is valid (avoid failure after posting assistant creation) + IPromptTemplate? template = templateFactory?.Create(templateConfig); + + // Create the client + AssistantClient client = CreateClient(clientProvider); + + // Create the assistant + AssistantCreationOptions assistantCreationOptions = templateConfig.CreateAssistantOptions(capabilities); + Assistant model = await client.CreateAssistantAsync(capabilities.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false); + + // Instantiate the agent + return + new OpenAIAssistantAgent(model, clientProvider, client) + { + Kernel = kernel, + Arguments = defaultArguments, + Template = template, + }; + } /// /// Define a new . /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Configuration for accessing the Assistants API service, such as the api-key. + /// OpenAI client provider for accessing the API service. /// The assistant definition. + /// The containing services, plugins, and other state for use throughout the operation. + /// Optional default arguments, including any . /// The to monitor for cancellation requests. The default is . /// An instance public static async Task CreateAsync( - Kernel kernel, - OpenAIAssistantConfiguration config, + OpenAIClientProvider clientProvider, OpenAIAssistantDefinition definition, + Kernel kernel, + KernelArguments? 
defaultArguments = null, CancellationToken cancellationToken = default) { // Validate input Verify.NotNull(kernel, nameof(kernel)); - Verify.NotNull(config, nameof(config)); + Verify.NotNull(clientProvider, nameof(clientProvider)); Verify.NotNull(definition, nameof(definition)); // Create the client - AssistantsClient client = CreateClient(config); + AssistantClient client = CreateClient(clientProvider); // Create the assistant - AssistantCreationOptions assistantCreationOptions = CreateAssistantCreationOptions(definition); - Assistant model = await client.CreateAssistantAsync(assistantCreationOptions, cancellationToken).ConfigureAwait(false); + AssistantCreationOptions assistantCreationOptions = definition.CreateAssistantOptions(); + Assistant model = await client.CreateAssistantAsync(definition.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false); // Instantiate the agent return - new OpenAIAssistantAgent(client, model, config) + new OpenAIAssistantAgent(model, clientProvider, client) { Kernel = kernel, + Arguments = defaultArguments }; } /// /// Retrieve a list of assistant definitions: . /// - /// Configuration for accessing the Assistants API service, such as the api-key. - /// The maximum number of assistant definitions to retrieve - /// The identifier of the assistant beyond which to begin selection. + /// Configuration for accessing the API service. /// The to monitor for cancellation requests. The default is . /// An list of objects. public static async IAsyncEnumerable ListDefinitionsAsync( - OpenAIAssistantConfiguration config, - int maxResults = 100, - string? 
lastId = null, + OpenAIClientProvider provider, [EnumeratorCancellation] CancellationToken cancellationToken = default) { // Create the client - AssistantsClient client = CreateClient(config); + AssistantClient client = CreateClient(provider); - // Retrieve the assistants - PageableList assistants; - - int resultCount = 0; - do + // Query and enumerate assistant definitions + await foreach (Assistant model in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = AssistantCollectionOrder.Descending }, cancellationToken).ConfigureAwait(false)) { - assistants = await client.GetAssistantsAsync(limit: Math.Min(maxResults, 100), ListSortOrder.Descending, after: lastId, cancellationToken: cancellationToken).ConfigureAwait(false); - foreach (Assistant assistant in assistants) - { - if (resultCount >= maxResults) - { - break; - } - - resultCount++; - - yield return - new() - { - Id = assistant.Id, - Name = assistant.Name, - Description = assistant.Description, - Instructions = assistant.Instructions, - EnableCodeInterpreter = assistant.Tools.Any(t => t is CodeInterpreterToolDefinition), - EnableRetrieval = assistant.Tools.Any(t => t is RetrievalToolDefinition), - FileIds = assistant.FileIds, - Metadata = assistant.Metadata, - ModelId = assistant.Model, - }; - - lastId = assistant.Id; - } + yield return CreateAssistantDefinition(model); } - while (assistants.HasMore && resultCount < maxResults); } /// /// Retrieve a by identifier. /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Configuration for accessing the Assistants API service, such as the api-key. + /// Configuration for accessing the API service. /// The agent identifier + /// The containing services, plugins, and other state for use throughout the operation. + /// Optional default arguments, including any . + /// An optional factory to produce the for the agent /// The to monitor for cancellation requests. The default is . 
/// An instance public static async Task RetrieveAsync( - Kernel kernel, - OpenAIAssistantConfiguration config, + OpenAIClientProvider clientProvider, string id, + Kernel kernel, + KernelArguments? defaultArguments = null, + IPromptTemplateFactory? templateFactory = null, CancellationToken cancellationToken = default) { + // Validate input + Verify.NotNull(kernel, nameof(kernel)); + Verify.NotNull(clientProvider, nameof(clientProvider)); + Verify.NotNullOrWhiteSpace(id, nameof(id)); + // Create the client - AssistantsClient client = CreateClient(config); + AssistantClient client = CreateClient(clientProvider); // Retrieve the assistant Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false); + // Ensure template is valid (avoid failure after posting assistant creation) + IPromptTemplate? template = + !string.IsNullOrWhiteSpace(model.Instructions) ? + templateFactory?.Create(new PromptTemplateConfig(model.Instructions!)) : + null; + // Instantiate the agent return - new OpenAIAssistantAgent(client, model, config) + new OpenAIAssistantAgent(model, clientProvider, client) { Kernel = kernel, + Arguments = defaultArguments, + Template = template, }; } @@ -180,12 +207,17 @@ public static async Task RetrieveAsync( ///
/// The to monitor for cancellation requests. The default is . /// The thread identifier - public async Task CreateThreadAsync(CancellationToken cancellationToken = default) - { - AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false); + public Task CreateThreadAsync(CancellationToken cancellationToken = default) + => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken); - return thread.Id; - } + /// + /// Create a new assistant thread. + /// + /// The options for creating the thread + /// The to monitor for cancellation requests. The default is . + /// The thread identifier + public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default) + => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken); /// /// Create a new assistant thread. @@ -200,7 +232,28 @@ public async Task DeleteThreadAsync( // Validate input Verify.NotNullOrWhiteSpace(threadId, nameof(threadId)); - return await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false); + ThreadDeletionResult result = await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false); + + return result.Deleted; + } + + /// + /// Uploads an file for the purpose of using with assistant. + /// + /// The content to upload + /// The name of the file + /// The to monitor for cancellation requests. The default is . + /// The file identifier + /// + /// Use the directly for more advanced file operations. 
+ /// + public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default) + { + OpenAIFileClient client = this._provider.Client.GetOpenAIFileClient(); + + OpenAIFile fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false); + + return fileInfo.Id; } /// @@ -232,7 +285,7 @@ public IAsyncEnumerable GetThreadMessagesAsync(string thread /// /// Delete the assistant definition. /// - /// + /// The to monitor for cancellation requests. The default is . /// True if assistant definition has been deleted /// /// Assistant based agent will not be useable after deletion. @@ -241,7 +294,8 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul { if (!this.IsDeleted) { - this.IsDeleted = (await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false)).Value; + AssistantDeletionResult result = await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false); + this.IsDeleted = result.Deleted; } return this.IsDeleted; @@ -254,12 +308,32 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul /// Optional arguments to pass to the agents's invocation, including any . /// The containing services, plugins, and other state for use by the agent. /// The to monitor for cancellation requests. The default is . - /// Asynchronous enumeration of messages. + /// Asynchronous enumeration of response messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken); + + /// + /// Invoke the assistant on the specified thread. 
+ /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of response messages. /// /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. /// public async IAsyncEnumerable InvokeAsync( string threadId, + OpenAIAssistantInvocationOptions? options, KernelArguments? arguments = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) @@ -267,9 +341,9 @@ public async IAsyncEnumerable InvokeAsync( this.ThrowIfDeleted(); kernel ??= this.Kernel; - arguments ??= this.Arguments; + arguments = this.MergeArguments(arguments); - await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, this._config.Polling, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) + await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false)) { if (isVisible) { @@ -278,33 +352,64 @@ public async IAsyncEnumerable InvokeAsync( } } + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. 
+ /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + => this.InvokeStreamingAsync(threadId, options: null, arguments, kernel, messages, cancellationToken); + + /// + /// Invoke the assistant on the specified thread with streaming response. + /// + /// The thread identifier + /// Optional invocation options + /// Optional arguments to pass to the agents's invocation, including any . + /// The containing services, plugins, and other state for use by the agent. + /// Optional receiver of the completed messages generated + /// The to monitor for cancellation requests. The default is . + /// Asynchronous enumeration of messages. + /// + /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility. + /// + public IAsyncEnumerable InvokeStreamingAsync( + string threadId, + OpenAIAssistantInvocationOptions? options, + KernelArguments? arguments = null, + Kernel? kernel = null, + ChatHistory? messages = null, + CancellationToken cancellationToken = default) + { + this.ThrowIfDeleted(); + + kernel ??= this.Kernel; + arguments = this.MergeArguments(arguments); + + return AssistantThreadActions.InvokeStreamingAsync(this, this._client, threadId, messages, options, this.Logger, kernel, arguments, cancellationToken); + } + /// protected override IEnumerable GetChannelKeys() { // Distinguish from other channel types. - yield return typeof(AgentChannel).FullName!; - - // Distinguish between different Azure OpenAI endpoints or OpenAI services. - yield return this._config.Endpoint ?? "openai"; + yield return typeof(OpenAIAssistantChannel).FullName!; - // Distinguish between different API versioning. 
- if (this._config.Version.HasValue) + foreach (string key in this._channelKeys) { - yield return this._config.Version.ToString()!; - } - - // Custom client receives dedicated channel. - if (this._config.HttpClient is not null) - { - if (this._config.HttpClient.BaseAddress is not null) - { - yield return this._config.HttpClient.BaseAddress.AbsoluteUri; - } - - foreach (string header in this._config.HttpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) - { - yield return header; - } + yield return key; } } @@ -313,10 +418,12 @@ protected override async Task CreateChannelAsync(CancellationToken { this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel)); - AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false); + AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false); + + this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id); OpenAIAssistantChannel channel = - new(this._client, thread.Id, this._config.Polling) + new(this._client, thread.Id) { Logger = this.LoggerFactory.CreateLogger() }; @@ -334,17 +441,23 @@ internal void ThrowIfDeleted() } } + internal Task GetInstructionsAsync(Kernel kernel, KernelArguments? arguments, CancellationToken cancellationToken) => + this.FormatInstructionsAsync(kernel, arguments, cancellationToken); + /// /// Initializes a new instance of the class. 
/// private OpenAIAssistantAgent( - AssistantsClient client, Assistant model, - OpenAIAssistantConfiguration config) + OpenAIClientProvider provider, + AssistantClient client) { + this._provider = provider; this._assistant = model; - this._client = client; - this._config = config; + this._client = provider.Client.GetAssistantClient(); + this._channelKeys = provider.ConfigurationKeys.ToArray(); + + this.Definition = CreateAssistantDefinition(model); this.Description = this._assistant.Description; this.Id = this._assistant.Id; @@ -352,64 +465,39 @@ private OpenAIAssistantAgent( this.Instructions = this._assistant.Instructions; } - private static AssistantCreationOptions CreateAssistantCreationOptions(OpenAIAssistantDefinition definition) + private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model) { - AssistantCreationOptions assistantCreationOptions = - new(definition.ModelId) - { - Description = definition.Description, - Instructions = definition.Instructions, - Name = definition.Name, - Metadata = definition.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value), - }; - - assistantCreationOptions.FileIds.AddRange(definition.FileIds ?? []); + OpenAIAssistantExecutionOptions? options = null; - if (definition.EnableCodeInterpreter) + if (model.Metadata.TryGetValue(OptionsMetadataKey, out string? optionsJson)) { - assistantCreationOptions.Tools.Add(new CodeInterpreterToolDefinition()); + options = JsonSerializer.Deserialize(optionsJson); } - if (definition.EnableRetrieval) - { - assistantCreationOptions.Tools.Add(new RetrievalToolDefinition()); - } - - return assistantCreationOptions; - } - - private static AssistantsClient CreateClient(OpenAIAssistantConfiguration config) - { - AssistantsClientOptions clientOptions = CreateClientOptions(config); + IReadOnlyList? fileIds = (IReadOnlyList?)model.ToolResources?.CodeInterpreter?.FileIds; + string? 
vectorStoreId = model.ToolResources?.FileSearch?.VectorStoreIds?.SingleOrDefault(); + bool enableJsonResponse = model.ResponseFormat is not null && model.ResponseFormat == AssistantResponseFormat.JsonObject; - // Inspect options - if (!string.IsNullOrWhiteSpace(config.Endpoint)) + return new(model.Model) { - // Create client configured for Azure OpenAI, if endpoint definition is present. - return new AssistantsClient(new Uri(config.Endpoint), new AzureKeyCredential(config.ApiKey), clientOptions); - } - - // Otherwise, create client configured for OpenAI. - return new AssistantsClient(config.ApiKey, clientOptions); + Id = model.Id, + Name = model.Name, + Description = model.Description, + Instructions = model.Instructions, + CodeInterpreterFileIds = fileIds, + EnableCodeInterpreter = model.Tools.Any(t => t is CodeInterpreterToolDefinition), + EnableFileSearch = model.Tools.Any(t => t is FileSearchToolDefinition), + Metadata = model.Metadata, + EnableJsonResponse = enableJsonResponse, + TopP = model.NucleusSamplingFactor, + Temperature = model.Temperature, + VectorStoreId = string.IsNullOrWhiteSpace(vectorStoreId) ? null : vectorStoreId, + ExecutionOptions = options, + }; } - private static AssistantsClientOptions CreateClientOptions(OpenAIAssistantConfiguration config) + private static AssistantClient CreateClient(OpenAIClientProvider config) { - AssistantsClientOptions options = - config.Version.HasValue ? - new(config.Version.Value) : - new(); - - options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; - options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), HttpPipelinePosition.PerCall); - - if (config.HttpClient is not null) - { - options.Transport = new HttpClientTransport(config.HttpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. 
- options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout - } - - return options; + return config.Client.GetAssistantClient(); } } diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs new file mode 100644 index 000000000000..c2247ec11e88 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantCapabilities.cs @@ -0,0 +1,94 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Defines the capabilities of an assistant. +/// +public class OpenAIAssistantCapabilities +{ + /// + /// Identifies the AI model targeted by the agent. + /// + public string ModelId { get; } + + /// + /// The assistant's unique id. (Ignored on create.) + /// + public string Id { get; init; } = string.Empty; + + /// + /// Optional file-ids made available to the code_interpreter tool, if enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? CodeInterpreterFileIds { get; init; } + + /// + /// Set if code-interpreter is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableCodeInterpreter { get; init; } + + /// + /// Set if file-search is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } + + /// + /// Set if json response-format is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableJsonResponse { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP { get; init; } + + /// + /// Requires file-search if specified. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? VectorStoreId { get; init; } + + /// + /// Default execution options for each agent invocation. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; } + + /// + /// Initializes a new instance of the class. + /// + /// The targeted model + [JsonConstructor] + public OpenAIAssistantCapabilities(string modelId) + { + Verify.NotNullOrWhiteSpace(modelId); + + this.ModelId = modelId; + } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs index 5b4600e64542..9e69e997e095 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs @@ -2,17 +2,18 @@ using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI.Assistants; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; namespace Microsoft.SemanticKernel.Agents.OpenAI; /// /// A specialization for use with . 
/// -internal sealed class OpenAIAssistantChannel(AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration) +internal sealed class OpenAIAssistantChannel(AssistantClient client, string threadId) : AgentChannel { - private readonly AssistantsClient _client = client; + private readonly AssistantClient _client = client; private readonly string _threadId = threadId; /// @@ -31,7 +32,15 @@ protected override async Task ReceiveAsync(IEnumerable histo { agent.ThrowIfDeleted(); - return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, pollingConfiguration, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); + } + + /// + protected override IAsyncEnumerable InvokeStreamingAsync(OpenAIAssistantAgent agent, IList messages, CancellationToken cancellationToken = default) + { + agent.ThrowIfDeleted(); + + return AssistantThreadActions.InvokeStreamingAsync(agent, this._client, this._threadId, messages, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken); } /// diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs deleted file mode 100644 index aa037266e7d5..000000000000 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs +++ /dev/null @@ -1,91 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Net.Http; -using Azure.AI.OpenAI.Assistants; - -namespace Microsoft.SemanticKernel.Agents.OpenAI; - -/// -/// Configuration to target an OpenAI Assistant API. -/// -public sealed class OpenAIAssistantConfiguration -{ - /// - /// The Assistants API Key. - /// - public string ApiKey { get; } - - /// - /// An optional endpoint if targeting Azure OpenAI Assistants API. 
- /// - public string? Endpoint { get; } - - /// - /// An optional API version override. - /// - public AssistantsClientOptions.ServiceVersion? Version { get; init; } - - /// - /// Custom for HTTP requests. - /// - public HttpClient? HttpClient { get; init; } - - /// - /// Defineds polling behavior for Assistant API requests. - /// - public PollingConfiguration Polling { get; } = new PollingConfiguration(); - - /// - /// Initializes a new instance of the class. - /// - /// The Assistants API Key - /// An optional endpoint if targeting Azure OpenAI Assistants API - public OpenAIAssistantConfiguration(string apiKey, string? endpoint = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - if (!string.IsNullOrWhiteSpace(endpoint)) - { - // Only verify `endpoint` when provided (AzureOAI vs OpenAI) - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - } - - this.ApiKey = apiKey; - this.Endpoint = endpoint; - } - - /// - /// Configuration and defaults associated with polling behavior for Assistant API requests. - /// - public sealed class PollingConfiguration - { - /// - /// The default polling interval when monitoring thread-run status. - /// - public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); - - /// - /// The default back-off interval when monitoring thread-run status. - /// - public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); - - /// - /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. - /// - public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); - - /// - /// The polling interval when monitoring thread-run status. - /// - public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; - - /// - /// The back-off interval when monitoring thread-run status. 
- /// - public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; - - /// - /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. - /// - public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; - } -} diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs index 3699e07ee1ed..79ad3f98f03e 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs @@ -1,57 +1,56 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System.Collections.Generic; +using System.Text.Json.Serialization; namespace Microsoft.SemanticKernel.Agents.OpenAI; /// -/// The data associated with an assistant's definition. +/// Defines an assistant. /// -public sealed class OpenAIAssistantDefinition +public sealed class OpenAIAssistantDefinition : OpenAIAssistantCapabilities { - /// - /// Identifies the AI model (OpenAI) or deployment (AzureOAI) this agent targets. - /// - public string? ModelId { get; init; } - /// /// The description of the assistant. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Description { get; init; } - /// - /// The assistant's unique id. (Ignored on create.) - /// - public string? Id { get; init; } - /// /// The system instructions for the assistant to use. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Instructions { get; init; } /// /// The name of the assistant. /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Name { get; init; } /// - /// Set if code-interpreter is enabled. + /// Provide the captured template format for the assistant if needed for agent retrieval. + /// () /// - public bool EnableCodeInterpreter { get; init; } + [JsonIgnore] + public string? 
TemplateFactoryFormat + { + get + { + if (this.Metadata == null) + { + return null; + } - /// - /// Set if retrieval is enabled. - /// - public bool EnableRetrieval { get; init; } + this.Metadata.TryGetValue(OpenAIAssistantAgent.TemplateMetadataKey, out string? templateFormat); - /// - /// A list of previously uploaded file IDs to attach to the assistant. - /// - public IEnumerable? FileIds { get; init; } + return templateFormat; + } + } /// - /// A set of up to 16 key/value pairs that can be attached to an agent, used for - /// storing additional information about that object in a structured format.Keys - /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// Initializes a new instance of the class. /// - public IReadOnlyDictionary? Metadata { get; init; } + /// The targeted model + [JsonConstructor] + public OpenAIAssistantDefinition(string modelId) + : base(modelId) { } } diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs new file mode 100644 index 000000000000..845cecb0956c --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Defines assistant execution options for each invocation. +/// +/// +/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options" +/// +public sealed class OpenAIAssistantExecutionOptions +{ + /// + /// Appends additional instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? AdditionalInstructions { get; init; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? 
MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TruncationMessageCount { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs new file mode 100644 index 000000000000..c06921a6f0d0 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs @@ -0,0 +1,94 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Defines per invocation execution options that override the assistant definition. +/// +/// +/// Not applicable to usage. +/// +public sealed class OpenAIAssistantInvocationOptions +{ + /// + /// Override the AI model targeted by the agent. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ModelName { get; init; } + + /// + /// Appends additional instructions. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? AdditionalInstructions { get; init; } + + /// + /// Set if code_interpreter tool is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableCodeInterpreter { get; init; } + + /// + /// Set if file_search tool is enabled. 
+ /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool EnableFileSearch { get; init; } + + /// + /// Set if json response-format is enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? EnableJsonResponse { get; init; } + + /// + /// The maximum number of completion tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxCompletionTokens { get; init; } + + /// + /// The maximum number of prompt tokens that may be used over the course of the run. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxPromptTokens { get; init; } + + /// + /// Enables parallel function calling during tool use. Enabled by default. + /// Use this property to disable. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? ParallelToolCallsEnabled { get; init; } + + /// + /// When set, the thread will be truncated to the N most recent messages in the thread. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TruncationMessageCount { get; init; } + + /// + /// The sampling temperature to use, between 0 and 2. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature { get; init; } + + /// + /// An alternative to sampling with temperature, called nucleus sampling, where the model + /// considers the results of the tokens with top_p probability mass. + /// So 0.1 means only the tokens comprising the top 10% probability mass are considered. + /// + /// + /// Recommended to set this or temperature but not both. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? 
TopP { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? Metadata { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs new file mode 100644 index 000000000000..4eb09eed7889 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs @@ -0,0 +1,172 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Threading; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.SemanticKernel.Http; +using OpenAI; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Provides an for use by . +/// +public sealed class OpenAIClientProvider +{ + /// + /// Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key. + /// + private const string SingleSpaceKey = " "; + + /// + /// An active client instance. + /// + public OpenAIClient Client { get; } + + /// + /// Configuration keys required for management. + /// + internal IReadOnlyList ConfigurationKeys { get; } + + private OpenAIClientProvider(OpenAIClient client, IEnumerable keys) + { + this.Client = client; + this.ConfigurationKeys = keys.ToArray(); + } + + /// + /// Produce a based on . + /// + /// The API key + /// The service endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri endpoint, HttpClient? 
httpClient = null) + { + Verify.NotNull(apiKey, nameof(apiKey)); + Verify.NotNull(endpoint, nameof(endpoint)); + + AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AzureOpenAIClient(endpoint, apiKey!, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// The credentials + /// The service endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null) + { + Verify.NotNull(credential, nameof(credential)); + Verify.NotNull(endpoint, nameof(endpoint)); + + AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient); + + return new(new AzureOpenAIClient(endpoint, credential, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// An optional endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? httpClient = null) + { + OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient); + return new(new OpenAIClient(new ApiKeyCredential(SingleSpaceKey), clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Produce a based on . + /// + /// The API key + /// An optional endpoint + /// Custom for HTTP requests. + public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null) + { + OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient); + return new(new OpenAIClient(apiKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient)); + } + + /// + /// Directly provide a client instance. 
+ /// + public static OpenAIClientProvider FromClient(OpenAIClient client) + { + return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]); + } + + private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient) + { + AzureOpenAIClientOptions options = new() + { + UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent + }; + + ConfigureClientOptions(httpClient, options); + + return options; + } + + private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, HttpClient? httpClient) + { + OpenAIClientOptions options = new() + { + UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent, + Endpoint = endpoint ?? httpClient?.BaseAddress, + }; + + ConfigureClientOptions(httpClient, options); + + return options; + } + + private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options) + { + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. + options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout + } + } + + private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue) + => + new((message) => + { + if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false) + { + message.Request.Headers.Set(headerName, headerValue); + } + }); + + private static IEnumerable CreateConfigurationKeys(Uri? endpoint, HttpClient? 
httpClient) + { + if (endpoint != null) + { + yield return endpoint.ToString(); + } + + if (httpClient is not null) + { + if (httpClient.BaseAddress is not null) + { + yield return httpClient.BaseAddress.AbsoluteUri; + } + + foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value)) + { + yield return header; + } + } + } +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs new file mode 100644 index 000000000000..3f39c43d03dc --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs @@ -0,0 +1,37 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Thread creation options. +/// +public sealed class OpenAIThreadCreationOptions +{ + /// + /// Optional file-ids made available to the code_interpreter tool, if enabled. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? CodeInterpreterFileIds { get; init; } + + /// + /// Optional messages to initialize thread with.. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList? Messages { get; init; } + + /// + /// Enables file-search if specified. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? VectorStoreId { get; init; } + + /// + /// A set of up to 16 key/value pairs that can be attached to an agent, used for + /// storing additional information about that object in a structured format.Keys + /// may be up to 64 characters in length and values may be up to 512 characters in length. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary? 
Metadata { get; init; } +} diff --git a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs new file mode 100644 index 000000000000..756ba689131c --- /dev/null +++ b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs @@ -0,0 +1,57 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Configuration and defaults associated with polling behavior for Assistant API run processing. +/// +public sealed class RunPollingOptions +{ + /// + /// The default polling interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The default back-off interval when monitoring thread-run status. + /// + public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1); + + /// + /// The default number of polling iterations before using . + /// + public static int DefaultPollingBackoffThreshold { get; } = 2; + + /// + /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. + /// + public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500); + + /// + /// The polling interval when monitoring thread-run status. + /// + public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval; + + /// + /// The back-off interval when monitoring thread-run status. + /// + public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff; + + /// + /// The number of polling iterations before using . + /// + public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold; + + /// + /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag. 
+ /// + public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay; + + /// + /// Gets the polling interval for the specified iteration count. + /// + /// The number of polling iterations already attempted + public TimeSpan GetPollingInterval(int iterationCount) => + iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval; +} diff --git a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs index 50aa328ebc67..a54b049795d8 100644 --- a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs +++ b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs @@ -1,12 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests; @@ -23,63 +20,20 @@ public class AgentChannelTests [Fact] public async Task VerifyAgentChannelUpcastAsync() { - TestChannel channel = new(); + // Arrange + MockChannel channel = new(); + // Assert Assert.Equal(0, channel.InvokeCount); - var messages = channel.InvokeAgentAsync(new TestAgent()).ToArrayAsync(); + // Act + var messages = await channel.InvokeAgentAsync(new MockAgent()).ToArrayAsync(); + // Assert Assert.Equal(1, channel.InvokeCount); - await Assert.ThrowsAsync(() => channel.InvokeAgentAsync(new NextAgent()).ToArrayAsync().AsTask()); + // Act + Mock mockAgent = new(); + await Assert.ThrowsAsync(() => channel.InvokeAgentAsync(mockAgent.Object).ToArrayAsync().AsTask()); + // Assert Assert.Equal(1, channel.InvokeCount); } - - /// - /// Not using mock as the goal here is to provide entrypoint to protected method. 
- /// - private sealed class TestChannel : AgentChannel - { - public int InvokeCount { get; private set; } - - public IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAgentAsync(Agent agent, CancellationToken cancellationToken = default) - => base.InvokeAsync(agent, cancellationToken); - -#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(TestAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) -#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously - { - this.InvokeCount++; - - yield break; - } - - protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - protected internal override Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - - protected internal override Task ResetAsync(CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - } - - private sealed class NextAgent : TestAgent; - - private class TestAgent : KernelAgent - { - protected internal override Task CreateChannelAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - protected internal override IEnumerable GetChannelKeys() - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/Agents/UnitTests/AgentChatTests.cs b/dotnet/src/Agents/UnitTests/AgentChatTests.cs index fc295e2b5550..be78e01b3211 100644 --- a/dotnet/src/Agents/UnitTests/AgentChatTests.cs +++ b/dotnet/src/Agents/UnitTests/AgentChatTests.cs @@ -3,9 +3,11 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; +using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using 
Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; +using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests; @@ -21,36 +23,36 @@ public class AgentChatTests [Fact] public async Task VerifyAgentChatLifecycleAsync() { - // Create chat + // Arrange: Create chat TestChat chat = new(); - // Verify initial state + // Assert: Verify initial state Assert.False(chat.IsActive); await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history - // Inject history + // Act: Inject history chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "More")]); chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "And then some")]); - // Verify updated history + // Assert: Verify updated history await this.VerifyHistoryAsync(expectedCount: 2, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent hasn't joined - // Invoke with input & verify (agent joins chat) + // Act: Invoke with input & verify (agent joins chat) chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "hi")); await chat.InvokeAsync().ToArrayAsync(); - Assert.Equal(1, chat.Agent.InvokeCount); - // Verify updated history + // Assert: Verify updated history + Assert.Equal(1, chat.Agent.InvokeCount); await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync()); // Primary history await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync(chat.Agent)); // Agent history - // Invoke without input & verify + // Act: Invoke without input await chat.InvokeAsync().ToArrayAsync(); - Assert.Equal(2, chat.Agent.InvokeCount); - // Verify final history + // Assert: Verify final history + Assert.Equal(2, chat.Agent.InvokeCount); await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync()); // Primary 
history await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync(chat.Agent)); // Agent history @@ -63,19 +65,46 @@ public async Task VerifyAgentChatLifecycleAsync() await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history } + /// + /// Verify throw exception for system message. + /// + [Fact] + public void VerifyAgentChatRejectsSystemMessage() + { + // Arrange: Create chat + TestChat chat = new() { LoggerFactory = new Mock().Object }; + + // Assert and Act: Verify system message not accepted + Assert.Throws(() => chat.AddChatMessage(new ChatMessageContent(AuthorRole.System, "hi"))); + } + + /// + /// Verify throw exception for if invoked when active. + /// + [Fact] + public async Task VerifyAgentChatThrowsWhenActiveAsync() + { + // Arrange: Create chat + TestChat chat = new(); + + // Assert and Act: Verify system message not accepted + await Assert.ThrowsAsync(() => chat.InvalidInvokeAsync().ToArrayAsync().AsTask()); + } + /// /// Verify the management of instances as they join . 
/// [Fact(Skip = "Not 100% reliable for github workflows, but useful for dev testing.")] public async Task VerifyGroupAgentChatConcurrencyAsync() { + // Arrange TestChat chat = new(); Task[] tasks; int isActive = 0; - // Queue concurrent tasks + // Act: Queue concurrent tasks object syncObject = new(); lock (syncObject) { @@ -97,7 +126,7 @@ public async Task VerifyGroupAgentChatConcurrencyAsync() await Task.Yield(); - // Verify failure + // Assert: Verify failure await Assert.ThrowsAsync(() => Task.WhenAll(tasks)); async Task SynchronizedInvokeAsync() @@ -127,5 +156,18 @@ private sealed class TestChat : AgentChat public override IAsyncEnumerable InvokeAsync( CancellationToken cancellationToken = default) => this.InvokeAgentAsync(this.Agent, cancellationToken); + + public IAsyncEnumerable InvalidInvokeAsync( + CancellationToken cancellationToken = default) + { + this.SetActivityOrThrow(); + return this.InvokeAgentAsync(this.Agent, cancellationToken); + } + + public override IAsyncEnumerable InvokeStreamingAsync(CancellationToken cancellationToken = default) + { + StreamingChatMessageContent[] messages = [new StreamingChatMessageContent(AuthorRole.Assistant, "sup")]; + return messages.ToAsyncEnumerable(); + } } } diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj index d46a4ee0cd1e..646defc0abb6 100644 --- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj +++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj @@ -8,7 +8,7 @@ true false 12 - $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110 + $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001 @@ -25,16 +25,14 @@ all - - + - diff --git a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs index 1a607ea7e6c7..e6668c7ea568 100644 --- a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs +++ 
b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs @@ -21,6 +21,7 @@ public class AggregatorAgentTests [InlineData(AggregatorMode.Flat, 2)] public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeOffset) { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = CreateMockAgent(); Agent agent3 = CreateMockAgent(); @@ -44,38 +45,57 @@ public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeO // Add message to outer chat (no agent has joined) uberChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test uber")); + // Act var messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); + // Act messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent hasn't joined chat, no broadcast + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent hasn't joined chat, no broadcast - // Add message to inner chat (not visible to parent) + // Arrange: Add message to inner chat (not visible to parent) groupChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test inner")); + // Act messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); + // Act messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Empty(messages); // Agent still hasn't joined chat + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Single(messages); - // Invoke outer chat (outer chat captures final inner message) + // Act: Invoke outer chat (outer chat captures final inner message) messages = await uberChat.InvokeAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Equal(1 + modeOffset, messages.Length); // New messages generated from inner chat + // Act messages = await uberChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(2 + modeOffset, 
messages.Length); // Total messages on uber chat + // Act messages = await groupChat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized + // Act messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync(); + // Assert Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized (agent equivalent) } diff --git a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs index 7c3267e3ad73..1c417a9e02ad 100644 --- a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs @@ -23,12 +23,18 @@ public class AgentGroupChatTests [Fact] public void VerifyGroupAgentChatDefaultState() { + // Arrange AgentGroupChat chat = new(); + + // Assert Assert.Empty(chat.Agents); Assert.NotNull(chat.ExecutionSettings); Assert.False(chat.IsComplete); + // Act chat.IsComplete = true; + + // Assert Assert.True(chat.IsComplete); } @@ -38,18 +44,25 @@ public void VerifyGroupAgentChatDefaultState() [Fact] public async Task VerifyGroupAgentChatAgentMembershipAsync() { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = CreateMockAgent(); Agent agent3 = CreateMockAgent(); Agent agent4 = CreateMockAgent(); AgentGroupChat chat = new(agent1, agent2); + + // Assert Assert.Equal(2, chat.Agents.Count); + // Act chat.AddAgent(agent3); + // Assert Assert.Equal(3, chat.Agents.Count); + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent4).ToArrayAsync(); + // Assert Assert.Equal(4, chat.Agents.Count); } @@ -59,6 +72,7 @@ public async Task VerifyGroupAgentChatAgentMembershipAsync() [Fact] public async Task VerifyGroupAgentChatMultiTurnAsync() { + // Arrange Agent agent1 = CreateMockAgent(); Agent agent2 = CreateMockAgent(); Agent agent3 = CreateMockAgent(); @@ -78,10 +92,14 @@ public async Task VerifyGroupAgentChatMultiTurnAsync() IsComplete = 
true }; + // Act and Assert await Assert.ThrowsAsync(() => chat.InvokeAsync(CancellationToken.None).ToArrayAsync().AsTask()); + // Act chat.ExecutionSettings.TerminationStrategy.AutomaticReset = true; var messages = await chat.InvokeAsync(CancellationToken.None).ToArrayAsync(); + + // Assert Assert.Equal(9, messages.Length); Assert.False(chat.IsComplete); @@ -108,6 +126,7 @@ public async Task VerifyGroupAgentChatMultiTurnAsync() [Fact] public async Task VerifyGroupAgentChatFailedSelectionAsync() { + // Arrange AgentGroupChat chat = Create3AgentChat(); chat.ExecutionSettings = @@ -125,6 +144,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync() // Remove max-limit in order to isolate the target behavior. chat.ExecutionSettings.TerminationStrategy.MaximumIterations = int.MaxValue; + // Act and Assert await Assert.ThrowsAsync(() => chat.InvokeAsync().ToArrayAsync().AsTask()); } @@ -134,6 +154,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync() [Fact] public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() { + // Arrange AgentGroupChat chat = Create3AgentChat(); chat.ExecutionSettings = @@ -147,7 +168,10 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() } }; + // Act var messages = await chat.InvokeAsync(CancellationToken.None).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.True(chat.IsComplete); } @@ -158,6 +182,7 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync() [Fact] public async Task VerifyGroupAgentChatDiscreteTerminationAsync() { + // Arrange Agent agent1 = CreateMockAgent(); AgentGroupChat chat = @@ -175,7 +200,10 @@ public async Task VerifyGroupAgentChatDiscreteTerminationAsync() } }; + // Act var messages = await chat.InvokeAsync(agent1).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.True(chat.IsComplete); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs 
b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs index d17391ee24be..ecb5cd6eee33 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs @@ -16,7 +16,10 @@ public class AgentGroupChatSettingsTests [Fact] public void VerifyChatExecutionSettingsDefault() { + // Arrange AgentGroupChatSettings settings = new(); + + // Assert Assert.IsType(settings.TerminationStrategy); Assert.Equal(1, settings.TerminationStrategy.MaximumIterations); Assert.IsType(settings.SelectionStrategy); @@ -28,6 +31,7 @@ public void VerifyChatExecutionSettingsDefault() [Fact] public void VerifyChatExecutionContinuationStrategyDefault() { + // Arrange Mock strategyMock = new(); AgentGroupChatSettings settings = new() @@ -35,6 +39,7 @@ public void VerifyChatExecutionContinuationStrategyDefault() TerminationStrategy = strategyMock.Object }; + // Assert Assert.Equal(strategyMock.Object, settings.TerminationStrategy); } @@ -44,6 +49,7 @@ public void VerifyChatExecutionContinuationStrategyDefault() [Fact] public void VerifyChatExecutionSelectionStrategyDefault() { + // Arrange Mock strategyMock = new(); AgentGroupChatSettings settings = new() @@ -51,6 +57,7 @@ public void VerifyChatExecutionSelectionStrategyDefault() SelectionStrategy = strategyMock.Object }; + // Assert Assert.NotNull(settings.SelectionStrategy); Assert.Equal(strategyMock.Object, settings.SelectionStrategy); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs index 6ad6fd75b18f..5af211c6cdf1 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs @@ -6,7 +6,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; -using Moq; 
using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -22,7 +21,10 @@ public class AggregatorTerminationStrategyTests [Fact] public void VerifyAggregateTerminationStrategyInitialState() { + // Arrange AggregatorTerminationStrategy strategy = new(); + + // Assert Assert.Equal(AggregateTerminationCondition.All, strategy.Condition); } @@ -32,14 +34,16 @@ public void VerifyAggregateTerminationStrategyInitialState() [Fact] public async Task VerifyAggregateTerminationStrategyAnyAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMock = new(); + MockAgent agentMock = new(); + // Act and Assert await VerifyResultAsync( expectedResult: true, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockFalse) { Condition = AggregateTerminationCondition.Any, @@ -47,7 +51,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockFalse, strategyMockFalse) { Condition = AggregateTerminationCondition.Any, @@ -55,7 +59,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: true, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockTrue) { Condition = AggregateTerminationCondition.Any, @@ -68,14 +72,16 @@ await VerifyResultAsync( [Fact] public async Task VerifyAggregateTerminationStrategyAllAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMock = new(); + MockAgent agentMock = new(); + // Act and Assert await VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockFalse) { Condition = AggregateTerminationCondition.All, @@ -83,7 +89,7 @@ await VerifyResultAsync( await 
VerifyResultAsync( expectedResult: false, - agentMock.Object, + agentMock, new(strategyMockFalse, strategyMockFalse) { Condition = AggregateTerminationCondition.All, @@ -91,7 +97,7 @@ await VerifyResultAsync( await VerifyResultAsync( expectedResult: true, - agentMock.Object, + agentMock, new(strategyMockTrue, strategyMockTrue) { Condition = AggregateTerminationCondition.All, @@ -104,34 +110,39 @@ await VerifyResultAsync( [Fact] public async Task VerifyAggregateTerminationStrategyAgentAsync() { + // Arrange TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true); TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false); - Mock agentMockA = new(); - Mock agentMockB = new(); + MockAgent agentMockA = new(); + MockAgent agentMockB = new(); + // Act and Assert await VerifyResultAsync( expectedResult: false, - agentMockB.Object, + agentMockB, new(strategyMockTrue, strategyMockTrue) { - Agents = [agentMockA.Object], + Agents = [agentMockA], Condition = AggregateTerminationCondition.All, }); await VerifyResultAsync( expectedResult: true, - agentMockB.Object, + agentMockB, new(strategyMockTrue, strategyMockTrue) { - Agents = [agentMockB.Object], + Agents = [agentMockB], Condition = AggregateTerminationCondition.All, }); } private static async Task VerifyResultAsync(bool expectedResult, Agent agent, AggregatorTerminationStrategy strategyRoot) { + // Act var result = await strategyRoot.ShouldTerminateAsync(agent, []); + + // Assert Assert.Equal(expectedResult, result); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs index 275ef0e0bf5e..a9f1d461ed85 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs @@ -5,7 +5,6 @@ using Microsoft.SemanticKernel.Agents; using 
Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -21,8 +20,9 @@ public class KernelFunctionSelectionStrategyTests [Fact] public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync() { - Mock mockAgent = new(); - KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Object.Id)); + // Arrange + MockAgent mockAgent = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) @@ -32,16 +32,40 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync() ResultParser = (result) => result.GetValue() ?? string.Empty, }; + // Assert Assert.Null(strategy.Arguments); Assert.NotNull(strategy.Kernel); Assert.NotNull(strategy.ResultParser); Assert.Equal("_a_", strategy.AgentsVariableName); Assert.Equal("_h_", strategy.HistoryVariableName); - Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []); + // Act + Agent nextAgent = await strategy.NextAsync([mockAgent], []); + // Assert Assert.NotNull(nextAgent); - Assert.Equal(mockAgent.Object, nextAgent); + Assert.Equal(mockAgent, nextAgent); + } + + /// + /// Verify strategy mismatch. 
+ /// + [Fact] + public async Task VerifyKernelFunctionSelectionStrategyThrowsOnNullResultAsync() + { + // Arrange + MockAgent mockAgent = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id)); + + KernelFunctionSelectionStrategy strategy = + new(plugin.Single(), new()) + { + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, + ResultParser = (result) => "larry", + }; + + // Act and Assert + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// /// Verify default state and behavior @@ -49,21 +73,21 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync() { - Mock mockAgent1 = new(); - Mock mockAgent2 = new(); - KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Object.Id)); + MockAgent mockAgent1 = new(); + MockAgent mockAgent2 = new(); + KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Id)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - InitialAgent = mockAgent1.Object, + InitialAgent = mockAgent1, ResultParser = (result) => result.GetValue() ?? 
string.Empty, }; - Agent nextAgent = await strategy.NextAsync([mockAgent2.Object], []); + Agent nextAgent = await strategy.NextAsync([mockAgent2], []); Assert.NotNull(nextAgent); - Assert.Equal(mockAgent1.Object, nextAgent); + Assert.Equal(mockAgent1, nextAgent); } /// @@ -72,25 +96,25 @@ public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync() { - Mock mockAgent = new(); + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(null)); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, UseInitialAgentAsFallback = true }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// @@ -99,25 +123,27 @@ public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync() [Fact] public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoInitialAgentAsync() { - Mock mockAgent = new(); + // Arrange + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad")); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name 
} }, }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, UseInitialAgentAsFallback = true }; - await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], [])); + // Act and Assert + await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], [])); } /// @@ -126,21 +152,21 @@ public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoIni [Fact] public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackAsync() { - Mock mockAgent = new(); + MockAgent mockAgent = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad")); KernelFunctionSelectionStrategy strategy = new(plugin.Single(), new()) { - Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } }, - InitialAgent = mockAgent.Object, + Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } }, + InitialAgent = mockAgent, UseInitialAgentAsFallback = true }; - Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []); + Agent nextAgent = await strategy.NextAsync([mockAgent], []); Assert.NotNull(nextAgent); - Assert.Equal(mockAgent.Object, nextAgent); + Assert.Equal(mockAgent, nextAgent); } private sealed class TestPlugin(string? 
agentName) diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs index 6f0b446e5e7a..7ee5cf838bc3 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs @@ -3,10 +3,8 @@ using System.Linq; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -22,17 +20,26 @@ public class KernelFunctionTerminationStrategyTests [Fact] public async Task VerifyKernelFunctionTerminationStrategyDefaultsAsync() { + // Arrange KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin()); - KernelFunctionTerminationStrategy strategy = new(plugin.Single(), new()); + KernelFunctionTerminationStrategy strategy = + new(plugin.Single(), new()) + { + AgentVariableName = "agent", + HistoryVariableName = "history", + }; + // Assert Assert.Null(strategy.Arguments); Assert.NotNull(strategy.Kernel); Assert.NotNull(strategy.ResultParser); + Assert.NotEqual("agent", KernelFunctionTerminationStrategy.DefaultAgentVariableName); + Assert.NotEqual("history", KernelFunctionTerminationStrategy.DefaultHistoryVariableName); - Mock mockAgent = new(); - - bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []); + // Act + MockAgent mockAgent = new(); + bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []); Assert.True(isTerminating); } @@ -52,9 +59,9 @@ public async Task VerifyKernelFunctionTerminationStrategyParsingAsync() ResultParser = (result) => string.Equals("test", result.GetValue(), StringComparison.OrdinalIgnoreCase) }; - Mock mockAgent = new(); + MockAgent mockAgent = new(); - bool 
isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []); + bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []); Assert.True(isTerminating); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs index a1b739ae1d1e..196a89ded6e3 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs @@ -2,10 +2,8 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; using Microsoft.SemanticKernel.ChatCompletion; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -13,7 +11,7 @@ namespace SemanticKernel.Agents.UnitTests.Core.Chat; /// /// Unit testing of . /// -public class RegexTerminationStrategyTests +public partial class RegexTerminationStrategyTests { /// /// Verify abililty of strategy to match expression. 
@@ -21,10 +19,12 @@ public class RegexTerminationStrategyTests [Fact] public async Task VerifyExpressionTerminationStrategyAsync() { + // Arrange RegexTerminationStrategy strategy = new("test"); - Regex r = new("(?:^|\\W)test(?:$|\\W)"); + Regex r = MyRegex(); + // Act and Assert await VerifyResultAsync( expectedResult: false, new(r), @@ -38,9 +38,17 @@ await VerifyResultAsync( private static async Task VerifyResultAsync(bool expectedResult, RegexTerminationStrategy strategyRoot, string content) { + // Arrange ChatMessageContent message = new(AuthorRole.Assistant, content); - Mock agent = new(); - var result = await strategyRoot.ShouldTerminateAsync(agent.Object, [message]); + MockAgent agent = new(); + + // Act + var result = await strategyRoot.ShouldTerminateAsync(agent, [message]); + + // Assert Assert.Equal(expectedResult, result); } + + [GeneratedRegex("(?:^|\\W)test(?:$|\\W)")] + private static partial Regex MyRegex(); } diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs index bb8fb4665b36..2d06fb6d0078 100644 --- a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs @@ -3,7 +3,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Chat; -using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core.Chat; @@ -19,24 +18,27 @@ public class SequentialSelectionStrategyTests [Fact] public async Task VerifySequentialSelectionStrategyTurnsAsync() { - Mock agent1 = new(); - Mock agent2 = new(); + // Arrange + MockAgent agent1 = new(); + MockAgent agent2 = new(); - Agent[] agents = [agent1.Object, agent2.Object]; + Agent[] agents = [agent1, agent2]; SequentialSelectionStrategy strategy = new(); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - await 
VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - await VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + // Act and Assert + await VerifyNextAgentAsync(agent1, agents, strategy); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); + // Arrange strategy.Reset(); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); // Verify index does not exceed current bounds. - agents = [agent1.Object]; - await VerifyNextAgentAsync(agent1.Object, agents, strategy); + agents = [agent1]; + await VerifyNextAgentAsync(agent1, agents, strategy); } /// @@ -45,25 +47,18 @@ public async Task VerifySequentialSelectionStrategyTurnsAsync() [Fact] public async Task VerifySequentialSelectionStrategyInitialAgentAsync() { - Mock agent1 = new(); - Mock agent2 = new(); + MockAgent agent1 = new(); + MockAgent agent2 = new(); - Agent[] agents = [agent1.Object, agent2.Object]; + Agent[] agents = [agent1, agent2]; SequentialSelectionStrategy strategy = new() { - InitialAgent = agent2.Object + InitialAgent = agent2 }; - await VerifyNextAgentAsync(agent2.Object, agents, strategy); - await VerifyNextAgentAsync(agent1.Object, agents, strategy); - } - - private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy) - { - Agent? 
nextAgent = await strategy.NextAsync(agents, []); - Assert.NotNull(nextAgent); - Assert.Equal(expectedAgent.Id, nextAgent.Id); + await VerifyNextAgentAsync(agent2, agents, strategy); + await VerifyNextAgentAsync(agent1, agents, strategy); } /// @@ -72,7 +67,19 @@ private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agen [Fact] public async Task VerifySequentialSelectionStrategyEmptyAsync() { + // Arrange SequentialSelectionStrategy strategy = new(); + + // Act and Assert await Assert.ThrowsAsync(() => strategy.NextAsync([], [])); } + + private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy) + { + // Act + Agent? nextAgent = await strategy.NextAsync(agents, []); + // Assert + Assert.NotNull(nextAgent); + Assert.Equal(expectedAgent.Id, nextAgent.Id); + } } diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs index c8a1c0578613..01debd8ded5f 100644 --- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs @@ -5,6 +5,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.History; using Microsoft.SemanticKernel.ChatCompletion; using Moq; using Xunit; @@ -22,6 +23,7 @@ public class ChatCompletionAgentTests [Fact] public void VerifyChatCompletionAgentDefinition() { + // Arrange ChatCompletionAgent agent = new() { @@ -30,6 +32,7 @@ public void VerifyChatCompletionAgentDefinition() Name = "test name", }; + // Assert Assert.NotNull(agent.Id); Assert.Equal("test instructions", agent.Instructions); Assert.Equal("test description", agent.Description); @@ -43,7 +46,8 @@ public void VerifyChatCompletionAgentDefinition() [Fact] public async Task VerifyChatCompletionAgentInvocationAsync() { - var mockService = new Mock(); + // 
Arrange + Mock mockService = new(); mockService.Setup( s => s.GetChatMessageContentsAsync( It.IsAny(), @@ -51,16 +55,18 @@ public async Task VerifyChatCompletionAgentInvocationAsync() It.IsAny(), It.IsAny())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]); - var agent = - new ChatCompletionAgent() + ChatCompletionAgent agent = + new() { Instructions = "test instructions", Kernel = CreateKernel(mockService.Object), Arguments = [], }; - var result = await agent.InvokeAsync([]).ToArrayAsync(); + // Act + ChatMessageContent[] result = await agent.InvokeAsync([]).ToArrayAsync(); + // Assert Assert.Single(result); mockService.Verify( @@ -79,13 +85,14 @@ public async Task VerifyChatCompletionAgentInvocationAsync() [Fact] public async Task VerifyChatCompletionAgentStreamingAsync() { + // Arrange StreamingChatMessageContent[] returnContent = [ new(AuthorRole.Assistant, "wh"), new(AuthorRole.Assistant, "at?"), ]; - var mockService = new Mock(); + Mock mockService = new(); mockService.Setup( s => s.GetStreamingChatMessageContentsAsync( It.IsAny(), @@ -93,16 +100,18 @@ public async Task VerifyChatCompletionAgentStreamingAsync() It.IsAny(), It.IsAny())).Returns(returnContent.ToAsyncEnumerable()); - var agent = - new ChatCompletionAgent() + ChatCompletionAgent agent = + new() { Instructions = "test instructions", Kernel = CreateKernel(mockService.Object), Arguments = [], }; - var result = await agent.InvokeStreamingAsync([]).ToArrayAsync(); + // Act + StreamingChatMessageContent[] result = await agent.InvokeStreamingAsync([]).ToArrayAsync(); + // Assert Assert.Equal(2, result.Length); mockService.Verify( @@ -115,6 +124,52 @@ public async Task VerifyChatCompletionAgentStreamingAsync() Times.Once); } + /// + /// Verify the invocation and response of . 
+ /// + [Fact] + public void VerifyChatCompletionServiceSelection() + { + // Arrange + Mock mockService = new(); + Kernel kernel = CreateKernel(mockService.Object); + + // Act + (IChatCompletionService service, PromptExecutionSettings? settings) = ChatCompletionAgent.GetChatCompletionService(kernel, null); + // Assert + Assert.Equal(mockService.Object, service); + Assert.Null(settings); + + // Act + (service, settings) = ChatCompletionAgent.GetChatCompletionService(kernel, []); + // Assert + Assert.Equal(mockService.Object, service); + Assert.Null(settings); + + // Act and Assert + Assert.Throws(() => ChatCompletionAgent.GetChatCompletionService(kernel, new KernelArguments(new PromptExecutionSettings() { ServiceId = "anything" }))); + } + + /// + /// Verify the invocation and response of . + /// + [Fact] + public void VerifyChatCompletionChannelKeys() + { + // Arrange + ChatCompletionAgent agent1 = new(); + ChatCompletionAgent agent2 = new(); + ChatCompletionAgent agent3 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) }; + ChatCompletionAgent agent4 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) }; + ChatCompletionAgent agent5 = new() { HistoryReducer = new ChatHistoryTruncationReducer(100) }; + + // Act ans Assert + Assert.Equal(agent1.GetChannelKeys(), agent2.GetChannelKeys()); + Assert.Equal(agent3.GetChannelKeys(), agent4.GetChannelKeys()); + Assert.NotEqual(agent1.GetChannelKeys(), agent3.GetChannelKeys()); + Assert.NotEqual(agent3.GetChannelKeys(), agent5.GetChannelKeys()); + } + private static Kernel CreateKernel(IChatCompletionService chatCompletionService) { var builder = Kernel.CreateBuilder(); diff --git a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs index 6732da6628e8..92aca7fadb67 100644 --- a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs @@ -1,11 +1,9 @@ ๏ปฟ// 
Copyright (c) Microsoft. All rights reserved. -using System; -using System.Collections.Generic; using System.Linq; -using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; +using Moq; using Xunit; namespace SemanticKernel.Agents.UnitTests.Core; @@ -22,21 +20,11 @@ public class ChatHistoryChannelTests [Fact] public async Task VerifyAgentWithoutIChatHistoryHandlerAsync() { - TestAgent agent = new(); // Not a IChatHistoryHandler + // Arrange + Mock agent = new(); // Not a IChatHistoryHandler ChatHistoryChannel channel = new(); // Requires IChatHistoryHandler - await Assert.ThrowsAsync(() => channel.InvokeAsync(agent).ToArrayAsync().AsTask()); - } - - private sealed class TestAgent : KernelAgent - { - protected internal override Task CreateChannelAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - protected internal override IEnumerable GetChannelKeys() - { - throw new NotImplementedException(); - } + // Act & Assert + await Assert.ThrowsAsync(() => channel.InvokeAsync(agent.Object).ToArrayAsync().AsTask()); } } diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs index a75533474147..d9042305d9fa 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs @@ -30,8 +30,10 @@ public class ChatHistoryReducerExtensionsTests [InlineData(100, 0, int.MaxValue, 100)] public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? endIndex = null, int? expectedCount = null) { + // Arrange ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + // Act ChatMessageContent[] extractedHistory = history.Extract(startIndex, endIndex).ToArray(); int finalIndex = endIndex ?? 
messageCount - 1; @@ -39,6 +41,7 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e expectedCount ??= finalIndex - startIndex + 1; + // Assert Assert.Equal(expectedCount, extractedHistory.Length); if (extractedHistory.Length > 0) @@ -58,16 +61,19 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e [InlineData(100, 0)] public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount) { + // Arrange ChatHistory summaries = [.. MockHistoryGenerator.CreateSimpleHistory(summaryCount)]; foreach (ChatMessageContent summary in summaries) { summary.Metadata = new Dictionary() { { "summary", true } }; } + // Act ChatHistory history = [.. summaries, .. MockHistoryGenerator.CreateSimpleHistory(regularCount)]; int finalSummaryIndex = history.LocateSummarizationBoundary("summary"); + // Assert Assert.Equal(summaryCount, finalSummaryIndex); } @@ -77,17 +83,22 @@ public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount) [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange ChatHistory history = []; + Mock mockReducer = new(); + mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null); + // Act bool isReduced = await history.ReduceAsync(null, default); + // Assert Assert.False(isReduced); Assert.Empty(history); - Mock mockReducer = new(); - mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null); + // Act isReduced = await history.ReduceAsync(mockReducer.Object, default); + // Assert Assert.False(isReduced); Assert.Empty(history); } @@ -98,13 +109,16 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange Mock mockReducer = new(); mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)[]); ChatHistory history = [.. 
MockHistoryGenerator.CreateSimpleHistory(10)]; + // Act bool isReduced = await history.ReduceAsync(mockReducer.Object, default); + // Assert Assert.True(isReduced); Assert.Empty(history); } @@ -124,11 +138,13 @@ public async Task VerifyChatHistoryReducedAsync() [InlineData(900, 500, int.MaxValue)] public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount, int? thresholdCount = null) { - // Shape of history doesn't matter since reduction is not expected + // Arrange: Shape of history doesn't matter since reduction is not expected ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.Equal(0, reductionIndex); } @@ -146,11 +162,13 @@ public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount [InlineData(1000, 500, 499)] public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCount, int? thresholdCount = null) { - // Generate history with only assistant messages + // Arrange: Generate history with only assistant messages ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); Assert.Equal(targetCount, messageCount - reductionIndex); } @@ -170,17 +188,20 @@ public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCoun [InlineData(1000, 500, 499)] public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int targetCount, int? thresholdCount = null) { - // Generate history with alternating user and assistant messages + // Arrange: Generate history with alternating user and assistant messages ChatHistory sourceHistory = [.. 
MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)]; + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); - // The reduction length should align with a user message, if threshold is specified + // Act: The reduction length should align with a user message, if threshold is specified bool hasThreshold = thresholdCount > 0; int expectedCount = targetCount + (hasThreshold && sourceHistory[^targetCount].Role != AuthorRole.User ? 1 : 0); + // Assert Assert.Equal(expectedCount, messageCount - reductionIndex); } @@ -201,14 +222,16 @@ public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int ta [InlineData(9)] public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, int? thresholdCount = null) { - // Generate a history with function call on index 5 and 9 and + // Arrange: Generate a history with function call on index 5 and 9 and // function result on index 6 and 10 (total length: 14) ChatHistory sourceHistory = [.. 
MockHistoryGenerator.CreateHistoryWithFunctionContent()]; ChatHistoryTruncationReducer reducer = new(targetCount, thresholdCount); + // Act int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount); + // Assert Assert.True(reductionIndex > 0); // The reduction length avoid splitting function call and result, regardless of threshold @@ -216,7 +239,7 @@ public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, i if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionCallContent)) { - expectedCount += 1; + expectedCount++; } else if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionResultContent)) { diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs index f464b6a8214a..53e93d0026c3 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs @@ -23,10 +23,12 @@ public class ChatHistorySummarizationReducerTests [InlineData(-1)] [InlineData(-1, int.MaxValue)] [InlineData(int.MaxValue, -1)] - public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null) + public void VerifyConstructorArgumentValidation(int targetCount, int? thresholdCount = null) { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); + // Act & Assert Assert.Throws(() => new ChatHistorySummarizationReducer(mockCompletionService.Object, targetCount, thresholdCount)); } @@ -34,15 +36,17 @@ public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? /// Verify object state after initialization. 
/// [Fact] - public void VerifyChatHistoryInitializationState() + public void VerifyInitializationState() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + // Assert Assert.Equal(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions); Assert.True(reducer.FailOnError); + // Act reducer = new(mockCompletionService.Object, 10) { @@ -50,25 +54,62 @@ public void VerifyChatHistoryInitializationState() SummarizationInstructions = "instructions", }; + // Assert Assert.NotEqual(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions); Assert.False(reducer.FailOnError); } + /// + /// Validate equality override. + /// + [Fact] + public void VerifyEquality() + { + // Arrange + Mock mockCompletionService = this.CreateMockCompletionService(); + + ChatHistorySummarizationReducer reducer1 = new(mockCompletionService.Object, 3, 3); + ChatHistorySummarizationReducer reducer2 = new(mockCompletionService.Object, 3, 3); + ChatHistorySummarizationReducer reducer3 = new(mockCompletionService.Object, 3, 3) { UseSingleSummary = false }; + ChatHistorySummarizationReducer reducer4 = new(mockCompletionService.Object, 3, 3) { SummarizationInstructions = "override" }; + ChatHistorySummarizationReducer reducer5 = new(mockCompletionService.Object, 4, 3); + ChatHistorySummarizationReducer reducer6 = new(mockCompletionService.Object, 3, 5); + ChatHistorySummarizationReducer reducer7 = new(mockCompletionService.Object, 3); + ChatHistorySummarizationReducer reducer8 = new(mockCompletionService.Object, 3); + + // Assert + Assert.True(reducer1.Equals(reducer1)); + Assert.True(reducer1.Equals(reducer2)); + Assert.True(reducer7.Equals(reducer8)); + Assert.True(reducer3.Equals(reducer3)); + Assert.True(reducer4.Equals(reducer4)); + Assert.False(reducer1.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer4)); + 
Assert.False(reducer1.Equals(reducer5)); + Assert.False(reducer1.Equals(reducer6)); + Assert.False(reducer1.Equals(reducer7)); + Assert.False(reducer1.Equals(reducer8)); + Assert.False(reducer1.Equals(null)); + } + /// /// Validate hash-code expresses reducer equivalency. /// [Fact] - public void VerifyChatHistoryHasCode() + public void VerifyHashCode() { + // Arrange HashSet reducers = []; Mock mockCompletionService = this.CreateMockCompletionService(); + // Act int hashCode1 = GenerateHashCode(3, 4); int hashCode2 = GenerateHashCode(33, 44); int hashCode3 = GenerateHashCode(3000, 4000); int hashCode4 = GenerateHashCode(3000, 4000); + // Assert Assert.NotEqual(hashCode1, hashCode2); Assert.NotEqual(hashCode2, hashCode3); Assert.Equal(hashCode3, hashCode4); @@ -90,12 +131,15 @@ int GenerateHashCode(int targetCount, int thresholdCount) [Fact] public async Task VerifyChatHistoryReductionSilentFailureAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10) { FailOnError = false }; + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -105,10 +149,12 @@ public async Task VerifyChatHistoryReductionSilentFailureAsync() [Fact] public async Task VerifyChatHistoryReductionThrowsOnFailureAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(throwException: true); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act and Assert await Assert.ThrowsAsync(() => reducer.ReduceAsync(sourceHistory)); } @@ -118,12 +164,15 @@ public async Task VerifyChatHistoryReductionThrowsOnFailureAsync() [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 20); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -133,12 +182,15 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11); VerifySummarization(messages[0]); } @@ -149,19 +201,24 @@ public async Task VerifyChatHistoryReducedAsync() [Fact] public async Task VerifyChatHistoryRereducedAsync() { + // Arrange Mock mockCompletionService = this.CreateMockCompletionService(); IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11); VerifySummarization(messages[0]); + // Act reducer = new(mockCompletionService.Object, 10) { UseSingleSummary = false }; reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert messages = VerifyReducedHistory(reducedHistory, 12); VerifySummarization(messages[0]); VerifySummarization(messages[1]); diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs index eebcf8fc6136..9d8b2e721fdf 100644 --- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs +++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs @@ -21,24 +21,54 @@ public class ChatHistoryTruncationReducerTests [InlineData(-1)] [InlineData(-1, int.MaxValue)] [InlineData(int.MaxValue, -1)] - public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null) + public void VerifyConstructorArgumentValidation(int targetCount, int? 
thresholdCount = null) { + // Act and Assert Assert.Throws(() => new ChatHistoryTruncationReducer(targetCount, thresholdCount)); } + /// + /// Validate equality override. + /// + [Fact] + public void VerifyEquality() + { + // Arrange + ChatHistoryTruncationReducer reducer1 = new(3, 3); + ChatHistoryTruncationReducer reducer2 = new(3, 3); + ChatHistoryTruncationReducer reducer3 = new(4, 3); + ChatHistoryTruncationReducer reducer4 = new(3, 5); + ChatHistoryTruncationReducer reducer5 = new(3); + ChatHistoryTruncationReducer reducer6 = new(3); + + // Assert + Assert.True(reducer1.Equals(reducer1)); + Assert.True(reducer1.Equals(reducer2)); + Assert.True(reducer5.Equals(reducer6)); + Assert.True(reducer3.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer3)); + Assert.False(reducer1.Equals(reducer4)); + Assert.False(reducer1.Equals(reducer5)); + Assert.False(reducer1.Equals(reducer6)); + Assert.False(reducer1.Equals(null)); + } + /// /// Validate hash-code expresses reducer equivalency. /// [Fact] - public void VerifyChatHistoryHasCode() + public void VerifyHashCode() { + // Arrange HashSet reducers = []; + // Act int hashCode1 = GenerateHashCode(3, 4); int hashCode2 = GenerateHashCode(33, 44); int hashCode3 = GenerateHashCode(3000, 4000); int hashCode4 = GenerateHashCode(3000, 4000); + // Assert Assert.NotEqual(hashCode1, hashCode2); Assert.NotEqual(hashCode2, hashCode3); Assert.Equal(hashCode3, hashCode4); @@ -60,11 +90,14 @@ int GenerateHashCode(int targetCount, int thresholdCount) [Fact] public async Task VerifyChatHistoryNotReducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(10).ToArray(); - ChatHistoryTruncationReducer reducer = new(20); + + // Act IEnumerable? 
reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert Assert.Null(reducedHistory); } @@ -74,11 +107,14 @@ public async Task VerifyChatHistoryNotReducedAsync() [Fact] public async Task VerifyChatHistoryReducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistoryTruncationReducer reducer = new(10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); + // Assert VerifyReducedHistory(reducedHistory, 10); } @@ -88,12 +124,15 @@ public async Task VerifyChatHistoryReducedAsync() [Fact] public async Task VerifyChatHistoryRereducedAsync() { + // Arrange IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray(); - ChatHistoryTruncationReducer reducer = new(10); + + // Act IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory); reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]); + // Assert VerifyReducedHistory(reducedHistory, 10); } diff --git a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs index 14a938a7b169..d7f370e3734c 100644 --- a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs @@ -19,10 +19,12 @@ public class ChatHistoryExtensionsTests [Fact] public void VerifyChatHistoryOrdering() { + // Arrange ChatHistory history = []; history.AddUserMessage("Hi"); history.AddAssistantMessage("Hi"); + // Act and Assert VerifyRole(AuthorRole.User, history.First()); VerifyRole(AuthorRole.Assistant, history.Last()); @@ -36,10 +38,12 @@ public void VerifyChatHistoryOrdering() [Fact] public async Task VerifyChatHistoryOrderingAsync() { + // Arrange ChatHistory history = []; history.AddUserMessage("Hi"); history.AddAssistantMessage("Hi"); + // Act and Assert VerifyRole(AuthorRole.User, history.First()); 
VerifyRole(AuthorRole.Assistant, history.Last()); diff --git a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs index 987c67fce804..504032854ebe 100644 --- a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs +++ b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs @@ -1,10 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; -using System.Threading; using System.Threading.Tasks; using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.Internal; using Microsoft.SemanticKernel.ChatCompletion; using Xunit; @@ -22,8 +19,10 @@ public class BroadcastQueueTests [Fact] public void VerifyBroadcastQueueDefaultConfiguration() { + // Arrange BroadcastQueue queue = new(); + // Assert Assert.True(queue.BlockDuration.TotalSeconds > 0); } @@ -33,32 +32,40 @@ public void VerifyBroadcastQueueDefaultConfiguration() [Fact] public async Task VerifyBroadcastQueueReceiveAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. BroadcastQueue queue = new() { BlockDuration = TimeSpan.FromSeconds(0.08), }; - TestChannel channel = new(); + MockChannel channel = new(); ChannelReference reference = new(channel, "test"); - // Verify initial state + // Act: Verify initial state await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify empty invocation with no channels. + // Act: Verify empty invocation with no channels. queue.Enqueue([], []); await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify empty invocation of channel. + // Act: Verify empty invocation of channel. 
queue.Enqueue([reference], []); await VerifyReceivingStateAsync(receiveCount: 1, queue, channel, "test"); + + // Assert Assert.Empty(channel.ReceivedMessages); - // Verify expected invocation of channel. + // Act: Verify expected invocation of channel. queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]); await VerifyReceivingStateAsync(receiveCount: 2, queue, channel, "test"); + + // Assert Assert.NotEmpty(channel.ReceivedMessages); } @@ -68,18 +75,19 @@ public async Task VerifyBroadcastQueueReceiveAsync() [Fact] public async Task VerifyBroadcastQueueFailureAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. BroadcastQueue queue = new() { BlockDuration = TimeSpan.FromSeconds(0.08), }; - BadChannel channel = new(); + MockChannel channel = new() { MockException = new InvalidOperationException("Test") }; ChannelReference reference = new(channel, "test"); - // Verify expected invocation of channel. + // Act: Verify expected invocation of channel. queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]); + // Assert await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference)); @@ -91,16 +99,16 @@ public async Task VerifyBroadcastQueueFailureAsync() [Fact] public async Task VerifyBroadcastQueueConcurrencyAsync() { - // Create queue and channel. + // Arrange: Create queue and channel. 
BroadcastQueue queue = new() { BlockDuration = TimeSpan.FromSeconds(0.08), }; - TestChannel channel = new(); + MockChannel channel = new(); ChannelReference reference = new(channel, "test"); - // Enqueue multiple channels + // Act: Enqueue multiple channels for (int count = 0; count < 10; ++count) { queue.Enqueue([new(channel, $"test{count}")], [new ChatMessageContent(AuthorRole.User, "hi")]); @@ -112,73 +120,14 @@ public async Task VerifyBroadcastQueueConcurrencyAsync() await queue.EnsureSynchronizedAsync(new ChannelReference(channel, $"test{count}")); } - // Verify result + // Assert Assert.NotEmpty(channel.ReceivedMessages); Assert.Equal(10, channel.ReceivedMessages.Count); } - private static async Task VerifyReceivingStateAsync(int receiveCount, BroadcastQueue queue, TestChannel channel, string hash) + private static async Task VerifyReceivingStateAsync(int receiveCount, BroadcastQueue queue, MockChannel channel, string hash) { await queue.EnsureSynchronizedAsync(new ChannelReference(channel, hash)); Assert.Equal(receiveCount, channel.ReceiveCount); } - - private sealed class TestChannel : AgentChannel - { - public TimeSpan ReceiveDuration { get; set; } = TimeSpan.FromSeconds(0.3); - - public int ReceiveCount { get; private set; } - - public List ReceivedMessages { get; } = []; - - protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - protected internal override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(Agent agent, CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - - protected internal override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken = default) - { - this.ReceivedMessages.AddRange(history); - this.ReceiveCount++; - - await Task.Delay(this.ReceiveDuration, cancellationToken); - } - - protected internal override Task ResetAsync(CancellationToken 
cancellationToken = default) - { - throw new NotImplementedException(); - } - } - - private sealed class BadChannel : AgentChannel - { - public TimeSpan ReceiveDuration { get; set; } = TimeSpan.FromSeconds(0.1); - - protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - protected internal override IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(Agent agent, CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - - protected internal override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken = default) - { - await Task.Delay(this.ReceiveDuration, cancellationToken); - - throw new InvalidOperationException("Test"); - } - - protected internal override Task ResetAsync(CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - } } diff --git a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs index 0a9715f25115..13cc3203d58c 100644 --- a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs +++ b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs @@ -17,21 +17,24 @@ public class KeyEncoderTests [Fact] public void VerifyKeyEncoderUniqueness() { + // Act this.VerifyHashEquivalancy([]); this.VerifyHashEquivalancy(nameof(KeyEncoderTests)); this.VerifyHashEquivalancy(nameof(KeyEncoderTests), "http://localhost", "zoo"); - // Verify "well-known" value + // Assert: Verify "well-known" value string localHash = KeyEncoder.GenerateHash([typeof(ChatHistoryChannel).FullName!]); Assert.Equal("Vdx37EnWT9BS+kkCkEgFCg9uHvHNw1+hXMA4sgNMKs4=", localHash); } private void VerifyHashEquivalancy(params string[] keys) { + // Act string hash1 = KeyEncoder.GenerateHash(keys); string hash2 = KeyEncoder.GenerateHash(keys); string hash3 = KeyEncoder.GenerateHash(keys.Concat(["another"])); + // Assert 
Assert.Equal(hash1, hash2); Assert.NotEqual(hash1, hash3); } diff --git a/dotnet/src/Agents/UnitTests/KernelAgentTests.cs b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs new file mode 100644 index 000000000000..4e4f4e531f4e --- /dev/null +++ b/dotnet/src/Agents/UnitTests/KernelAgentTests.cs @@ -0,0 +1,111 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Linq; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests; + +/// +/// Verify behavior of base class. +/// +public class KernelAgentTests +{ + /// + /// Verify ability to merge null . + /// + [Fact] + public void VerifyNullArgumentMerge() + { + // Arrange + MockAgent agentWithNullArguments = new(); + // Act + KernelArguments? arguments = agentWithNullArguments.MergeArguments(null); + // Assert + Assert.Null(arguments); + + // Arrange + KernelArguments overrideArguments = []; + // Act + arguments = agentWithNullArguments.MergeArguments(overrideArguments); + // Assert + Assert.NotNull(arguments); + Assert.StrictEqual(overrideArguments, arguments); + + // Arrange + MockAgent agentWithEmptyArguments = new() { Arguments = new() }; + // Act + arguments = agentWithEmptyArguments.MergeArguments(null); + // Assert + Assert.NotNull(arguments); + Assert.StrictEqual(agentWithEmptyArguments.Arguments, arguments); + } + + /// + /// Verify ability to merge parameters. + /// + [Fact] + public void VerifyArgumentParameterMerge() + { + // Arrange + MockAgent agentWithArguments = new() { Arguments = new() { { "a", 1 } } }; + KernelArguments overrideArguments = new() { { "b", 2 } }; + + // Act + KernelArguments? 
arguments = agentWithArguments.MergeArguments(overrideArguments); + + // Assert + Assert.NotNull(arguments); + Assert.Equal(2, arguments.Count); + Assert.Equal(1, arguments["a"]); + Assert.Equal(2, arguments["b"]); + + // Arrange + overrideArguments["a"] = 11; + overrideArguments["c"] = 3; + + // Act + arguments = agentWithArguments.MergeArguments(overrideArguments); + + // Assert + Assert.NotNull(arguments); + Assert.Equal(3, arguments.Count); + Assert.Equal(11, arguments["a"]); + Assert.Equal(2, arguments["b"]); + Assert.Equal(3, arguments["c"]); + } + + /// + /// Verify ability to merge . + /// + [Fact] + public void VerifyArgumentSettingsMerge() + { + // Arrange + FunctionChoiceBehavior autoInvoke = FunctionChoiceBehavior.Auto(); + MockAgent agentWithSettings = new() { Arguments = new(new PromptExecutionSettings() { FunctionChoiceBehavior = autoInvoke }) }; + KernelArguments overrideArgumentsNoSettings = new(); + + // Act + KernelArguments? arguments = agentWithSettings.MergeArguments(overrideArgumentsNoSettings); + + // Assert + Assert.NotNull(arguments); + Assert.NotNull(arguments.ExecutionSettings); + Assert.Single(arguments.ExecutionSettings); + Assert.StrictEqual(autoInvoke, arguments.ExecutionSettings.First().Value.FunctionChoiceBehavior); + + // Arrange + FunctionChoiceBehavior noInvoke = FunctionChoiceBehavior.None(); + KernelArguments overrideArgumentsWithSettings = new(new PromptExecutionSettings() { FunctionChoiceBehavior = noInvoke }); + + // Act + arguments = agentWithSettings.MergeArguments(overrideArgumentsWithSettings); + + // Assert + Assert.NotNull(arguments); + Assert.NotNull(arguments.ExecutionSettings); + Assert.Single(arguments.ExecutionSettings); + Assert.StrictEqual(noInvoke, arguments.ExecutionSettings.First().Value.FunctionChoiceBehavior); + } +} diff --git a/dotnet/src/Agents/UnitTests/MockAgent.cs b/dotnet/src/Agents/UnitTests/MockAgent.cs index b8b7f295e02b..7439cefe1ea3 100644 --- a/dotnet/src/Agents/UnitTests/MockAgent.cs +++ 
b/dotnet/src/Agents/UnitTests/MockAgent.cs @@ -37,4 +37,10 @@ public override IAsyncEnumerable InvokeStreamingAsy this.InvokeCount++; return this.Response.Select(m => new StreamingChatMessageContent(m.Role, m.Content)).ToAsyncEnumerable(); } + + // Expose protected method for testing + public new KernelArguments? MergeArguments(KernelArguments? arguments) + { + return base.MergeArguments(arguments); + } } diff --git a/dotnet/src/Agents/UnitTests/MockChannel.cs b/dotnet/src/Agents/UnitTests/MockChannel.cs new file mode 100644 index 000000000000..94103129992c --- /dev/null +++ b/dotnet/src/Agents/UnitTests/MockChannel.cs @@ -0,0 +1,68 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; + +namespace SemanticKernel.Agents.UnitTests; + +internal sealed class MockChannel : AgentChannel +{ + public Exception? 
MockException { get; set; } + + public int InvokeCount { get; private set; } + + public int ReceiveCount { get; private set; } + + public TimeSpan ReceiveDuration { get; set; } = TimeSpan.FromSeconds(0.3); + + public List ReceivedMessages { get; } = []; + + protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + public IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAgentAsync(Agent agent, CancellationToken cancellationToken = default) + => base.InvokeAsync(agent, cancellationToken); + +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(MockAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default) +#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously + { + this.InvokeCount++; + + if (this.MockException is not null) + { + throw this.MockException; + } + + yield break; + } + + protected internal override IAsyncEnumerable InvokeStreamingAsync(MockAgent agent, IList messages, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + + protected internal override async Task ReceiveAsync(IEnumerable history, CancellationToken cancellationToken = default) + { + this.ReceivedMessages.AddRange(history); + this.ReceiveCount++; + + await Task.Delay(this.ReceiveDuration, cancellationToken); + + if (this.MockException is not null) + { + throw this.MockException; + } + } + + protected internal override Task ResetAsync(CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs new file mode 100644 index 000000000000..cd51c736ac18 --- /dev/null 
+++ b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs @@ -0,0 +1,46 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +internal static class AssertCollection +{ + public static void Equal(IReadOnlyList? source, IReadOnlyList? target, Func? adapter = null) + { + if (source == null) + { + Assert.Null(target); + return; + } + + Assert.NotNull(target); + Assert.Equal(source.Count, target.Count); + + adapter ??= (x) => x; + + for (int i = 0; i < source.Count; i++) + { + Assert.Equal(adapter(source[i]), adapter(target[i])); + } + } + + public static void Equal(IReadOnlyDictionary? source, IReadOnlyDictionary? target) + { + if (source == null) + { + Assert.Null(target); + return; + } + + Assert.NotNull(target); + Assert.Equal(source.Count, target.Count); + + foreach ((TKey key, TValue value) in source) + { + Assert.True(target.TryGetValue(key, out TValue? targetValue)); + Assert.Equal(value, targetValue); + } + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs index b1e4d397eded..6288c6a5aed8 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs @@ -2,7 +2,7 @@ using System.Linq; using Azure.Core; using Azure.Core.Pipeline; -using Microsoft.SemanticKernel.Agents.OpenAI.Azure; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure; @@ -18,14 +18,17 @@ public class AddHeaderRequestPolicyTests [Fact] public void VerifyAddHeaderRequestPolicyExecution() { + // Arrange using HttpClientTransport clientTransport = new(); HttpPipeline pipeline = new(clientTransport); HttpMessage message = pipeline.CreateMessage(); - AddHeaderRequestPolicy policy = new(headerName: 
"testname", headerValue: "testvalue"); + + // Act policy.OnSendingRequest(message); + // Assert Assert.Single(message.Request.Headers); HttpHeader header = message.Request.Headers.Single(); Assert.Equal("testname", header.Name); diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs index 0b0a0707e49a..97dbf32903d6 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs @@ -1,7 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; using Xunit; using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI; @@ -29,7 +29,10 @@ public void VerifyToMessageRole() private void VerifyRoleConversion(AuthorRole inputRole, MessageRole expectedRole) { + // Arrange MessageRole convertedRole = inputRole.ToMessageRole(); + + // Assert Assert.Equal(expectedRole, convertedRole); } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs index 3f982f3a7b47..70c27ccb2152 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs @@ -17,11 +17,15 @@ public class KernelExtensionsTests [Fact] public void VerifyGetKernelFunctionLookup() { + // Arrange Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); + // Act KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-'); + + // Assert Assert.NotNull(function); Assert.Equal(nameof(TestPlugin.TestFunction), function.Name); } @@ -32,10 +36,12 @@ 
public void VerifyGetKernelFunctionLookup() [Fact] public void VerifyGetKernelFunctionInvalid() { + // Arrange Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); + // Act and Assert Assert.Throws(() => kernel.GetKernelFunction("a", '-')); Assert.Throws(() => kernel.GetKernelFunction("a-b", ':')); Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-')); diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs index eeb8a4d3b9d1..acf195840366 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs @@ -1,9 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; using System.ComponentModel; -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents.OpenAI; +using OpenAI.Assistants; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions; @@ -19,18 +19,28 @@ public class KernelFunctionExtensionsTests [Fact] public void VerifyKernelFunctionToFunctionTool() { + // Arrange KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + // Assert Assert.Equal(2, plugin.FunctionCount); + // Arrange KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)]; KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)]; - FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin", "-"); - Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal); + // Act + FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.FunctionName, StringComparison.Ordinal); Assert.Equal("test description", definition1.Description); - 
FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin", "-"); - Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal); + // Act + FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin"); + + // Assert + Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.FunctionName, StringComparison.Ordinal); Assert.Equal("test description", definition2.Description); } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs new file mode 100644 index 000000000000..59d35d687a2c --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs @@ -0,0 +1,210 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal; + +/// +/// Unit testing of . +/// +public class AssistantMessageFactoryTests +{ + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsDefault() + { + // Arrange (Setup message with null metadata) + ChatMessageContent message = new(AuthorRole.User, "test"); + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.Empty(options.Metadata); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataEmpty() + { + // Arrange Setup message with empty metadata + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = new Dictionary() + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.Empty(options.Metadata); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadata() + { + // Arrange: Setup message with metadata + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = + new Dictionary() + { + { "a", 1 }, + { "b", "2" }, + } + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.NotEmpty(options.Metadata); + Assert.Equal(2, options.Metadata.Count); + Assert.Equal("1", options.Metadata["a"]); + Assert.Equal("2", options.Metadata["b"]); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataNull() + { + // Arrange: Setup message with null metadata value + ChatMessageContent message = + new(AuthorRole.User, "test") + { + Metadata = + new Dictionary() + { + { "a", null }, + { "b", "2" }, + } + }; + + // Act: Create options + MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message); + + // Assert + Assert.NotNull(options); + Assert.NotEmpty(options.Metadata); + Assert.Equal(2, options.Metadata.Count); + Assert.Equal(string.Empty, options.Metadata["a"]); + Assert.Equal("2", options.Metadata["b"]); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageContentsWithText() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().Text); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageUrl() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageUri); + } + + /// + /// Verify options creation. + /// + [Fact(Skip = "API bug with data Uri construction")] + public void VerifyAssistantMessageAdapterGetMessageWithImageData() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageUri); + } + + /// + /// Verify options creation. + /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithImageFile() + { + // Arrange + ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Single(contents); + Assert.NotNull(contents.Single().ImageFileId); + } + + /// + /// Verify options creation. 
+ /// + [Fact] + public void VerifyAssistantMessageAdapterGetMessageWithAll() + { + // Arrange + ChatMessageContent message = + new( + AuthorRole.User, + items: + [ + new TextContent("test"), + new ImageContent(new Uri("https://localhost/myimage.png")), + new FileReferenceContent("file-id") + ]); + + // Act + MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray(); + + // Assert + Assert.NotNull(contents); + Assert.Equal(3, contents.Length); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs new file mode 100644 index 000000000000..39d3eb58d11a --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs @@ -0,0 +1,150 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.Agents.OpenAI.Internal; +using OpenAI.Assistants; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal; + +/// +/// Unit testing of . +/// +public class AssistantRunOptionsFactoryTests +{ + /// + /// Verify run options generation with null . + /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsNullTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + AdditionalInstructions = "test", + }, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, null); + + // Assert + Assert.NotNull(options); + Assert.Null(options.InstructionsOverride); + Assert.Null(options.Temperature); + Assert.Null(options.NucleusSamplingFactor); + Assert.Equal("test", options.AdditionalInstructions); + Assert.Empty(options.Metadata); + } + + /// + /// Verify run options generation with equivalent . 
+ /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + Temperature = 0.5F, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, "test", invocationOptions); + + // Assert + Assert.NotNull(options); + Assert.Equal("test", options.InstructionsOverride); + Assert.Null(options.Temperature); + Assert.Null(options.NucleusSamplingFactor); + } + + /// + /// Verify run options generation with override. + /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + AdditionalInstructions = "test1", + TruncationMessageCount = 5, + }, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + AdditionalInstructions = "test2", + Temperature = 0.9F, + TruncationMessageCount = 8, + EnableJsonResponse = true, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions); + + // Assert + Assert.NotNull(options); + Assert.Equal(0.9F, options.Temperature); + Assert.Equal(8, options.TruncationStrategy.LastMessages); + Assert.Equal("test2", options.AdditionalInstructions); + Assert.Equal(AssistantResponseFormat.JsonObject, options.ResponseFormat); + Assert.Null(options.NucleusSamplingFactor); + } + + /// + /// Verify run options generation with metadata. 
+ /// + [Fact] + public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest() + { + // Arrange + OpenAIAssistantDefinition definition = + new("gpt-anything") + { + Temperature = 0.5F, + ExecutionOptions = + new() + { + TruncationMessageCount = 5, + }, + }; + + OpenAIAssistantInvocationOptions invocationOptions = + new() + { + Metadata = new Dictionary + { + { "key1", "value" }, + { "key2", null! }, + }, + }; + + // Act + RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null, invocationOptions); + + // Assert + Assert.Equal(2, options.Metadata.Count); + Assert.Equal("value", options.Metadata["key1"]); + Assert.Equal(string.Empty, options.Metadata["key2"]); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs index 1d9a9ec9dfcf..2730cbbc821a 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs @@ -1,15 +1,18 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; +using System.ClientModel; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Net; using System.Net.Http; +using System.Text; using System.Threading.Tasks; -using Azure.AI.OpenAI.Assistants; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Assistants; using Xunit; namespace SemanticKernel.Agents.UnitTests.OpenAI; @@ -30,113 +33,313 @@ public sealed class OpenAIAssistantAgentTests : IDisposable [Fact] public async Task VerifyOpenAIAssistantAgentCreationEmptyAsync() { - OpenAIAssistantDefinition definition = - new() - { - ModelId = "testmodel", - }; - - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); - - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, - this.CreateTestConfiguration(targetAzure: true, useVersion: true), - definition); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.Null(agent.Instructions); - Assert.Null(agent.Name); - Assert.Null(agent.Description); - Assert.False(agent.IsDeleted); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); } /// /// Verify the invocation and response of - /// for an agent with optional properties defined. + /// for an agent with name, instructions, and description. 
/// [Fact] public async Task VerifyOpenAIAssistantAgentCreationPropertiesAsync() { + // Arrange OpenAIAssistantDefinition definition = - new() + new("testmodel") { - ModelId = "testmodel", Name = "testname", Description = "testdescription", Instructions = "testinstructions", }; - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentFull); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } - OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, - this.CreateTestConfiguration(), - definition); + /// + /// Verify the invocation and response of + /// for an agent with name, instructions, and description from a template. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationDefaultTemplateAsync() + { + // Arrange + PromptTemplateConfig templateConfig = + new("test instructions") + { + Name = "testname", + Description = "testdescription", + }; - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.NotNull(agent.Instructions); - Assert.NotNull(agent.Name); - Assert.NotNull(agent.Description); - Assert.False(agent.IsDeleted); + OpenAIAssistantCapabilities capabilities = new("testmodel"); + + // Act and Assert + await this.VerifyAgentTemplateAsync(capabilities, templateConfig); + + // Act and Assert + await this.VerifyAgentTemplateAsync(capabilities, templateConfig, new KernelPromptTemplateFactory()); } /// /// Verify the invocation and response of - /// for an agent that has all properties defined.. + /// for an agent with code-interpreter enabled. 
/// [Fact] - public async Task VerifyOpenAIAssistantAgentCreationEverythingAsync() + public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterAsync() { + // Arrange OpenAIAssistantDefinition definition = - new() + new("testmodel") { - ModelId = "testmodel", EnableCodeInterpreter = true, - EnableRetrieval = true, - FileIds = ["#1", "#2"], - Metadata = new Dictionary() { { "a", "1" } }, }; - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentWithEverything); + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with code-interpreter files. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterFilesAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableCodeInterpreter = true, + CodeInterpreterFileIds = ["file1", "file2"], + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with a file-search and no vector-store + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithFileSearchAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableFileSearch = true, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with a vector-store-id (for file-search). + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithVectorStoreAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableFileSearch = true, + VectorStoreId = "#vs1", + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with metadata. 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithMetadataAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + Metadata = new Dictionary() + { + { "a", "1" }, + { "b", "2" }, + }, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with json-response mode enabled. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithJsonResponseAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + EnableJsonResponse = true, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with temperature defined. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithTemperatureAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + Temperature = 2.0F, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with topP defined. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithTopPAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + TopP = 2.0F, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with empty execution settings. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = new OpenAIAssistantExecutionOptions(), + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with populated execution settings. 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithExecutionOptionsAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = + new() + { + MaxCompletionTokens = 100, + ParallelToolCallsEnabled = false, + } + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of + /// for an agent with execution settings and meta-data. + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAndMetadataAsync() + { + // Arrange + OpenAIAssistantDefinition definition = + new("testmodel") + { + ExecutionOptions = new(), + Metadata = new Dictionary() + { + { "a", "1" }, + { "b", "2" }, + }, + }; + + // Act and Assert + await this.VerifyAgentCreationAsync(definition); + } + + /// + /// Verify the invocation and response of . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentRetrievalAsync() + { + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); + + this.SetupResponse(HttpStatusCode.OK, definition); OpenAIAssistantAgent agent = - await OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, + await OpenAIAssistantAgent.RetrieveAsync( this.CreateTestConfiguration(), - definition); + "#id", + this._emptyKernel); - Assert.NotNull(agent); - Assert.Equal(2, agent.Tools.Count); - Assert.True(agent.Tools.OfType().Any()); - Assert.True(agent.Tools.OfType().Any()); - Assert.NotEmpty(agent.FileIds); - Assert.NotEmpty(agent.Metadata); + // Act and Assert + ValidateAgentDefinition(agent, definition); } /// /// Verify the invocation and response of . 
/// [Fact] - public async Task VerifyOpenAIAssistantAgentRetrieveAsync() + public async Task VerifyOpenAIAssistantAgentRetrievalWithFactoryAsync() { - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); + + this.SetupResponse(HttpStatusCode.OK, definition); OpenAIAssistantAgent agent = await OpenAIAssistantAgent.RetrieveAsync( - this._emptyKernel, this.CreateTestConfiguration(), - "#id"); + "#id", + this._emptyKernel, + new KernelArguments(), + new KernelPromptTemplateFactory()); - Assert.NotNull(agent); - Assert.NotNull(agent.Id); - Assert.Null(agent.Instructions); - Assert.Null(agent.Name); - Assert.Null(agent.Description); - Assert.False(agent.IsDeleted); + // Act and Assert + ValidateAgentDefinition(agent, definition); } /// @@ -145,36 +348,148 @@ await OpenAIAssistantAgent.RetrieveAsync( [Fact] public async Task VerifyOpenAIAssistantAgentDeleteAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + // Assert Assert.False(agent.IsDeleted); - this.SetupResponse(HttpStatusCode.OK, ResponseContent.DeleteAgent); + // Arrange + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteAgent); + // Act await agent.DeleteAsync(); + // Assert Assert.True(agent.IsDeleted); + // Act await agent.DeleteAsync(); // Doesn't throw + // Assert Assert.True(agent.IsDeleted); + await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test"))); + await Assert.ThrowsAsync(() => agent.GetThreadMessagesAsync("threadid").ToArrayAsync().AsTask()); + await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask()); + await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid").ToArrayAsync().AsTask()); + await Assert.ThrowsAsync(() => agent.InvokeStreamingAsync("threadid", new OpenAIAssistantInvocationOptions()).ToArrayAsync().AsTask()); } /// - /// Verify complex chat interaction 
across multiple states. + /// Verify the creating a thread via . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentCreateThreadAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread); + + // Act + string threadId = await agent.CreateThreadAsync(); + // Assert + Assert.NotNull(threadId); + + // Arrange + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.CreateThread); + // Act + threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions()); + // Assert + Assert.NotNull(threadId); + } + + /// + /// Verify the deleting a thread via . /// [Fact] - public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync() + public async Task VerifyOpenAIAssistantAgentDeleteThreadAsync() { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteThread); + + // Act + bool isDeleted = await agent.DeleteThreadAsync("threadid"); + // Assert + Assert.True(isDeleted); + } + + /// + /// Verify the deleting a thread via . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentUploadFileAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.UploadFile); + + // Act + using MemoryStream stream = new(Encoding.UTF8.GetBytes("test")); + string fileId = await agent.UploadFileAsync(stream, "text.txt"); + + // Assert + Assert.NotNull(fileId); + } + + /// + /// Verify invocation via . 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentGroupChatAsync() + { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - ResponseContent.CreateRun, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetTextMessage); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage()); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert + Assert.Single(messages); + Assert.Single(messages[0].Items); + Assert.IsType(messages[0].Items[0]); + + // Arrange + this.SetupResponse(HttpStatusCode.OK, OpenAIAssistantResponseContent.DeleteThread); + + // Act + await chat.ResetAsync(); + + // Assert + Assert.Empty(this._messageHandlerStub.ResponseQueue); + } + + /// + /// Verify direction invocation of . 
+ /// + [Fact] + public async Task VerifyOpenAIAssistantAgentInvokeAsync() + { + // Arrange + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage()); + + // Act + ChatMessageContent[] messages = await agent.InvokeAsync("threadid").ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -186,18 +501,23 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync() [Fact] public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - ResponseContent.CreateRun, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetTextMessageWithAnnotation); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessageWithAnnotation); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Equal(2, messages[0].Items.Count); Assert.NotNull(messages[0].Items.SingleOrDefault(c => c is TextContent)); @@ -210,18 +530,23 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync() [Fact] public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - 
ResponseContent.CreateRun, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetImageMessage); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetImageMessage); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -233,31 +558,76 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync() [Fact] public async Task VerifyOpenAIAssistantAgentGetMessagesAsync() { - // Create agent + // Arrange: Create agent OpenAIAssistantAgent agent = await this.CreateAgentAsync(); // Initialize agent channel this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - ResponseContent.CreateRun, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetTextMessage); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage()); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + // Assert Assert.Single(messages); - // Setup messages + // Arrange: Setup messages this.SetupResponses( HttpStatusCode.OK, - ResponseContent.ListMessagesPageMore, - ResponseContent.ListMessagesPageMore, - ResponseContent.ListMessagesPageFinal); + OpenAIAssistantResponseContent.ListMessagesPageMore, + OpenAIAssistantResponseContent.ListMessagesPageMore, + OpenAIAssistantResponseContent.ListMessagesPageFinal); - // Get messages and verify + // Act: Get messages messages = await chat.GetChatMessagesAsync(agent).ToArrayAsync(); + 
// Assert + Assert.Equal(5, messages.Length); + } + + /// + /// Verify message retrieval via . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentAddThreadMessagesAsync() + { + // Arrange: Create agent + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + // Arrange: Setup messages + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.GetTextMessage()); + + // Act (no exception) + await agent.AddChatMessageAsync(agent.Id, new ChatMessageContent(AuthorRole.User, "hi")); + Assert.Empty(this._messageHandlerStub.ResponseQueue); + } + + /// + /// Verify message retrieval via . + /// + [Fact] + public async Task VerifyOpenAIAssistantAgentGetThreadMessagesAsync() + { + // Arrange: Create agent + OpenAIAssistantAgent agent = await this.CreateAgentAsync(); + + // Arrange: Setup messages + this.SetupResponses( + HttpStatusCode.OK, + OpenAIAssistantResponseContent.ListMessagesPageMore, + OpenAIAssistantResponseContent.ListMessagesPageMore, + OpenAIAssistantResponseContent.ListMessagesPageFinal); + + // Act: Get messages + ChatMessageContent[] messages = await agent.GetThreadMessagesAsync("threadid").ToArrayAsync(); + + // Assert Assert.Equal(5, messages.Length); } @@ -267,24 +637,30 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync() [Fact] public async Task VerifyOpenAIAssistantAgentAddMessagesAsync() { - // Create agent + // Arrange: Create agent OpenAIAssistantAgent agent = await this.CreateAgentAsync(); // Initialize agent channel this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - ResponseContent.CreateRun, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetTextMessage); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage()); AgentGroupChat chat = new(); + + // Act 
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + // Assert Assert.Single(messages); + // Arrange chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "hi")); + // Act messages = await chat.GetChatMessagesAsync().ToArrayAsync(); + // Assert Assert.Equal(2, messages.Length); } @@ -294,28 +670,33 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync() [Fact] public async Task VerifyOpenAIAssistantAgentListDefinitionAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); this.SetupResponses( HttpStatusCode.OK, - ResponseContent.ListAgentsPageMore, - ResponseContent.ListAgentsPageMore, - ResponseContent.ListAgentsPageFinal); + OpenAIAssistantResponseContent.ListAgentsPageMore, + OpenAIAssistantResponseContent.ListAgentsPageMore, + OpenAIAssistantResponseContent.ListAgentsPageFinal); + // Act var messages = await OpenAIAssistantAgent.ListDefinitionsAsync( this.CreateTestConfiguration()).ToArrayAsync(); + // Assert Assert.Equal(7, messages.Length); + // Arrange this.SetupResponses( HttpStatusCode.OK, - ResponseContent.ListAgentsPageMore, - ResponseContent.ListAgentsPageMore); + OpenAIAssistantResponseContent.ListAgentsPageMore, + OpenAIAssistantResponseContent.ListAgentsPageFinal); + // Act messages = await OpenAIAssistantAgent.ListDefinitionsAsync( - this.CreateTestConfiguration(), - maxResults: 4).ToArrayAsync(); + this.CreateTestConfiguration()).ToArrayAsync(); + // Assert Assert.Equal(4, messages.Length); } @@ -325,6 +706,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync( [Fact] public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync() { + // Arrange OpenAIAssistantAgent agent = await this.CreateAgentAsync(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); @@ -332,17 +714,21 @@ public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync() this.SetupResponses( HttpStatusCode.OK, - ResponseContent.CreateThread, - ResponseContent.CreateRun, - 
ResponseContent.PendingRun, - ResponseContent.ToolSteps, - ResponseContent.ToolResponse, - ResponseContent.CompletedRun, - ResponseContent.MessageSteps, - ResponseContent.GetTextMessage); + OpenAIAssistantResponseContent.CreateThread, + OpenAIAssistantResponseContent.Run.CreateRun, + OpenAIAssistantResponseContent.Run.PendingRun, + OpenAIAssistantResponseContent.Run.ToolSteps, + OpenAIAssistantResponseContent.ToolResponse, + OpenAIAssistantResponseContent.Run.CompletedRun, + OpenAIAssistantResponseContent.Run.MessageSteps, + OpenAIAssistantResponseContent.GetTextMessage()); AgentGroupChat chat = new(); + + // Act ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + + // Assert Assert.Single(messages); Assert.Single(messages[0].Items); Assert.IsType(messages[0].Items[0]); @@ -365,531 +751,158 @@ public OpenAIAssistantAgentTests() this._emptyKernel = new Kernel(); } - private Task CreateAgentAsync() + private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition) { - OpenAIAssistantDefinition definition = - new() - { - ModelId = "testmodel", - }; + this.SetupResponse(HttpStatusCode.OK, definition); - this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple); - - return - OpenAIAssistantAgent.CreateAsync( - this._emptyKernel, + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( this.CreateTestConfiguration(), - definition); - } + definition, + this._emptyKernel); - private OpenAIAssistantConfiguration CreateTestConfiguration(bool targetAzure = false, bool useVersion = false) - { - return new(apiKey: "fakekey", endpoint: targetAzure ? "https://localhost" : null) - { - HttpClient = this._httpClient, - Version = useVersion ? 
AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview : null, - }; + ValidateAgentDefinition(agent, definition); } - private void SetupResponse(HttpStatusCode statusCode, string content) + private async Task VerifyAgentTemplateAsync( + OpenAIAssistantCapabilities capabilities, + PromptTemplateConfig templateConfig, + IPromptTemplateFactory? templateFactory = null) { - this._messageHandlerStub.ResponseToReturn = - new(statusCode) - { - Content = new StringContent(content) - }; + this.SetupResponse(HttpStatusCode.OK, capabilities, templateConfig); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateFromTemplateAsync( + this.CreateTestConfiguration(), + capabilities, + this._emptyKernel, + new KernelArguments(), + templateConfig, + templateFactory); + + ValidateAgentDefinition(agent, capabilities, templateConfig); } - private void SetupResponses(HttpStatusCode statusCode, params string[] content) + private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantDefinition expectedConfig) { - foreach (var item in content) - { -#pragma warning disable CA2000 // Dispose objects before losing scope - this._messageHandlerStub.ResponseQueue.Enqueue( - new(statusCode) - { - Content = new StringContent(item) - }); -#pragma warning restore CA2000 // Dispose objects before losing scope - } + ValidateAgent(agent, expectedConfig.Name, expectedConfig.Instructions, expectedConfig.Description, expectedConfig); } - private sealed class MyPlugin + private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantCapabilities expectedConfig, PromptTemplateConfig templateConfig) { - [KernelFunction] - public void MyFunction(int index) - { } + ValidateAgent(agent, templateConfig.Name, templateConfig.Template, templateConfig.Description, expectedConfig); } - private static class ResponseContent + private static void ValidateAgent( + OpenAIAssistantAgent agent, + string? expectedName, + string? expectedInstructions, + string? 
expectedDescription, + OpenAIAssistantCapabilities expectedConfig) { - public const string CreateAgentSimple = - """ - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {} - } - """; - - public const string CreateAgentFull = - """ - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": "testname", - "description": "testdescription", - "model": "gpt-4-turbo", - "instructions": "testinstructions", - "tools": [], - "file_ids": [], - "metadata": {} - } - """; - - public const string CreateAgentWithEverything = - """ - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698984975, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [ - { - "type": "code_interpreter" - }, - { - "type": "retrieval" - } - ], - "file_ids": ["#1", "#2"], - "metadata": {"a": "1"} - } - """; + // Verify fundamental state + Assert.NotNull(agent); + Assert.NotNull(agent.Id); + Assert.False(agent.IsDeleted); + Assert.NotNull(agent.Definition); + Assert.Equal(expectedConfig.ModelId, agent.Definition.ModelId); + + // Verify core properties + Assert.Equal(expectedInstructions ?? string.Empty, agent.Instructions); + Assert.Equal(expectedName ?? string.Empty, agent.Name); + Assert.Equal(expectedDescription ?? 
string.Empty, agent.Description); + + // Verify options + Assert.Equal(expectedConfig.Temperature, agent.Definition.Temperature); + Assert.Equal(expectedConfig.TopP, agent.Definition.TopP); + Assert.Equal(expectedConfig.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens); + Assert.Equal(expectedConfig.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens); + Assert.Equal(expectedConfig.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled); + Assert.Equal(expectedConfig.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount); + + // Verify tool definitions + int expectedToolCount = 0; + + bool hasCodeInterpreter = false; + if (expectedConfig.EnableCodeInterpreter) + { + hasCodeInterpreter = true; + ++expectedToolCount; + } - public const string DeleteAgent = - """ - { - "id": "asst_abc123", - "object": "assistant.deleted", - "deleted": true - } - """; + Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any()); - public const string CreateThread = - """ - { - "id": "thread_abc123", - "object": "thread", - "created_at": 1699012949, - "metadata": {} - } - """; + bool hasFileSearch = false; + if (expectedConfig.EnableFileSearch) + { + hasFileSearch = true; + ++expectedToolCount; + } - public const string CreateRun = - """ - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699063290, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "queued", - "started_at": 1699063290, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699063291, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {}, - "usage": null, - "temperature": 1 - } - """; + Assert.Equal(hasFileSearch, agent.Tools.OfType().Any()); - public const string PendingRun = - """ - { - "id": "run_abc123", 
- "object": "thread.run", - "created_at": 1699063290, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "requires_action", - "started_at": 1699063290, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699063291, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {}, - "usage": null, - "temperature": 1 - } - """; + Assert.Equal(expectedToolCount, agent.Tools.Count); - public const string CompletedRun = - """ - { - "id": "run_abc123", - "object": "thread.run", - "created_at": 1699063290, - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "status": "completed", - "started_at": 1699063290, - "expires_at": null, - "cancelled_at": null, - "failed_at": null, - "completed_at": 1699063291, - "last_error": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {}, - "usage": null, - "temperature": 1 - } - """; + // Verify metadata + Assert.NotNull(agent.Definition.Metadata); + if (expectedConfig.ExecutionOptions == null) + { + Assert.Equal(expectedConfig.Metadata ?? new Dictionary(), agent.Definition.Metadata); + } + else // Additional metadata present when execution options are defined + { + Assert.Equal((expectedConfig.Metadata?.Count ?? 
0) + 1, agent.Definition.Metadata.Count); - public const string MessageSteps = - """ + if (expectedConfig.Metadata != null) { - "object": "list", - "data": [ + foreach (var (key, value) in expectedConfig.Metadata) { - "id": "step_abc123", - "object": "thread.run.step", - "created_at": 1699063291, - "run_id": "run_abc123", - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "type": "message_creation", - "status": "completed", - "cancelled_at": null, - "completed_at": 1699063291, - "expired_at": null, - "failed_at": null, - "last_error": null, - "step_details": { - "type": "message_creation", - "message_creation": { - "message_id": "msg_abc123" - } - }, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 - } + string? targetValue = agent.Definition.Metadata[key]; + Assert.NotNull(targetValue); + Assert.Equal(value, targetValue); } - ], - "first_id": "step_abc123", - "last_id": "step_abc456", - "has_more": false } - """; + } - public const string ToolSteps = - """ - { - "object": "list", - "data": [ - { - "id": "step_abc987", - "object": "thread.run.step", - "created_at": 1699063291, - "run_id": "run_abc123", - "assistant_id": "asst_abc123", - "thread_id": "thread_abc123", - "type": "tool_calls", - "status": "in_progress", - "cancelled_at": null, - "completed_at": 1699063291, - "expired_at": null, - "failed_at": null, - "last_error": null, - "step_details": { - "type": "tool_calls", - "tool_calls": [ - { - "id": "tool_1", - "type": "function", - "function": { - "name": "MyPlugin-MyFunction", - "arguments": "{ \"index\": 3 }", - "output": "test" - } - } - ] - }, - "usage": { - "prompt_tokens": 123, - "completion_tokens": 456, - "total_tokens": 579 - } - } - ], - "first_id": "step_abc123", - "last_id": "step_abc456", - "has_more": false - } - """; + // Verify detail definition + Assert.Equal(expectedConfig.VectorStoreId, agent.Definition.VectorStoreId); + Assert.Equal(expectedConfig.CodeInterpreterFileIds, 
agent.Definition.CodeInterpreterFileIds); + } - public const string ToolResponse = "{ }"; + private Task CreateAgentAsync() + { + OpenAIAssistantDefinition definition = new("testmodel"); - public const string GetImageMessage = - """ - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699017614, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "image_file", - "image_file": { - "file_id": "file_123" - } - } - ], - "assistant_id": "asst_abc123", - "run_id": "run_abc123" - } - """; + this.SetupResponse(HttpStatusCode.OK, definition); - public const string GetTextMessage = - """ - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699017614, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? Explain it in simple terms.", - "annotations": [] - } - } - ], - "assistant_id": "asst_abc123", - "run_id": "run_abc123" - } - """; + return + OpenAIAssistantAgent.CreateAsync( + this.CreateTestConfiguration(), + definition, + this._emptyKernel); + } - public const string GetTextMessageWithAnnotation = - """ - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699017614, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? Explain it in simple terms.**f1", - "annotations": [ - { - "type": "file_citation", - "text": "**f1", - "file_citation": { - "file_id": "file_123", - "quote": "does" - }, - "start_index": 3, - "end_index": 6 - } - ] - } - } - ], - "assistant_id": "asst_abc123", - "run_id": "run_abc123" - } - """; + private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false) + => targetAzure ? 
+ OpenAIClientProvider.ForAzureOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: new Uri("https://localhost"), this._httpClient) : + OpenAIClientProvider.ForOpenAI(apiKey: new ApiKeyCredential("fakekey"), endpoint: null, this._httpClient); - public const string ListAgentsPageMore = - """ - { - "object": "list", - "data": [ - { - "id": "asst_abc123", - "object": "assistant", - "created_at": 1698982736, - "name": "Coding Tutor", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful assistant designed to make me better at coding!", - "tools": [], - "file_ids": [], - "metadata": {} - }, - { - "id": "asst_abc456", - "object": "assistant", - "created_at": 1698982718, - "name": "My Assistant", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful assistant designed to make me better at coding!", - "tools": [], - "file_ids": [], - "metadata": {} - }, - { - "id": "asst_abc789", - "object": "assistant", - "created_at": 1698982643, - "name": null, - "description": null, - "model": "gpt-4-turbo", - "instructions": null, - "tools": [], - "file_ids": [], - "metadata": {} - } - ], - "first_id": "asst_abc123", - "last_id": "asst_abc789", - "has_more": true - } - """; + private void SetupResponse(HttpStatusCode statusCode, string content) => + this._messageHandlerStub.SetupResponses(statusCode, content); - public const string ListAgentsPageFinal = - """ - { - "object": "list", - "data": [ - { - "id": "asst_abc789", - "object": "assistant", - "created_at": 1698982736, - "name": "Coding Tutor", - "description": null, - "model": "gpt-4-turbo", - "instructions": "You are a helpful assistant designed to make me better at coding!", - "tools": [], - "file_ids": [], - "metadata": {} - } - ], - "first_id": "asst_abc789", - "last_id": "asst_abc789", - "has_more": false - } - """; + private void SetupResponse(HttpStatusCode statusCode, OpenAIAssistantDefinition definition) => + 
this._messageHandlerStub.SetupResponses(statusCode, OpenAIAssistantResponseContent.AssistantDefinition(definition)); - public const string ListMessagesPageMore = - """ - { - "object": "list", - "data": [ - { - "id": "msg_abc123", - "object": "thread.message", - "created_at": 1699016383, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? Explain it in simple terms.", - "annotations": [] - } - } - ], - "file_ids": [], - "assistant_id": null, - "run_id": null, - "metadata": {} - }, - { - "id": "msg_abc456", - "object": "thread.message", - "created_at": 1699016383, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "Hello, what is AI?", - "annotations": [] - } - } - ], - "file_ids": [ - "file-abc123" - ], - "assistant_id": null, - "run_id": null, - "metadata": {} - } - ], - "first_id": "msg_abc123", - "last_id": "msg_abc456", - "has_more": true - } - """; + private void SetupResponse(HttpStatusCode statusCode, OpenAIAssistantCapabilities capabilities, PromptTemplateConfig templateConfig) => + this._messageHandlerStub.SetupResponses(statusCode, OpenAIAssistantResponseContent.AssistantDefinition(capabilities, templateConfig)); - public const string ListMessagesPageFinal = - """ - { - "object": "list", - "data": [ - { - "id": "msg_abc789", - "object": "thread.message", - "created_at": 1699016383, - "thread_id": "thread_abc123", - "role": "user", - "content": [ - { - "type": "text", - "text": { - "value": "How does AI work? 
Explain it in simple terms.", - "annotations": [] - } - } - ], - "file_ids": [], - "assistant_id": null, - "run_id": null, - "metadata": {} - } - ], - "first_id": "msg_abc789", - "last_id": "msg_abc789", - "has_more": false - } - """; + private void SetupResponses(HttpStatusCode statusCode, params string[] content) => + this._messageHandlerStub.SetupResponses(statusCode, content); + + private sealed class MyPlugin + { + [KernelFunction] + public void MyFunction(int index) + { } } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs deleted file mode 100644 index 3708ab50ab97..000000000000 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs +++ /dev/null @@ -1,61 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; -using System.Net.Http; -using Azure.AI.OpenAI.Assistants; -using Microsoft.SemanticKernel.Agents.OpenAI; -using Xunit; - -namespace SemanticKernel.Agents.UnitTests.OpenAI; - -/// -/// Unit testing of . -/// -public class OpenAIAssistantConfigurationTests -{ - /// - /// Verify initial state. - /// - [Fact] - public void VerifyOpenAIAssistantConfigurationInitialState() - { - OpenAIAssistantConfiguration config = new(apiKey: "testkey"); - - Assert.Equal("testkey", config.ApiKey); - Assert.Null(config.Endpoint); - Assert.Null(config.HttpClient); - Assert.Null(config.Version); - } - - /// - /// Verify assignment. 
- /// - [Fact] - public void VerifyOpenAIAssistantConfigurationAssignment() - { - using HttpClient client = new(); - - OpenAIAssistantConfiguration config = - new(apiKey: "testkey", endpoint: "https://localhost") - { - HttpClient = client, - Version = AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview, - }; - - Assert.Equal("testkey", config.ApiKey); - Assert.Equal("https://localhost", config.Endpoint); - Assert.NotNull(config.HttpClient); - Assert.Equal(AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview, config.Version); - } - - /// - /// Verify secure endpoint. - /// - [Fact] - public void VerifyOpenAIAssistantConfigurationThrows() - { - using HttpClient client = new(); - - Assert.Throws( - () => new OpenAIAssistantConfiguration(apiKey: "testkey", endpoint: "http://localhost")); - } -} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs index b17b61211c18..b0131ac9be6b 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System.Collections.Generic; +using System.Text.Json; using Microsoft.SemanticKernel.Agents.OpenAI; using Xunit; @@ -16,17 +17,27 @@ public class OpenAIAssistantDefinitionTests [Fact] public void VerifyOpenAIAssistantDefinitionInitialState() { - OpenAIAssistantDefinition definition = new(); + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); - Assert.Null(definition.Id); + // Assert + Assert.Equal(string.Empty, definition.Id); + Assert.Equal("testmodel", definition.ModelId); Assert.Null(definition.Name); - Assert.Null(definition.ModelId); Assert.Null(definition.Instructions); Assert.Null(definition.Description); Assert.Null(definition.Metadata); - Assert.Null(definition.FileIds); + Assert.Null(definition.ExecutionOptions); + Assert.Null(definition.Temperature); + Assert.Null(definition.TopP); + Assert.False(definition.EnableFileSearch); + Assert.Null(definition.VectorStoreId); + Assert.Null(definition.CodeInterpreterFileIds); Assert.False(definition.EnableCodeInterpreter); - Assert.False(definition.EnableRetrieval); + Assert.False(definition.EnableJsonResponse); + + // Act and Assert + ValidateSerialization(definition); } /// @@ -35,28 +46,104 @@ public void VerifyOpenAIAssistantDefinitionInitialState() [Fact] public void VerifyOpenAIAssistantDefinitionAssignment() { + // Arrange OpenAIAssistantDefinition definition = - new() + new("testmodel") { Id = "testid", Name = "testname", - ModelId = "testmodel", Instructions = "testinstructions", Description = "testdescription", - FileIds = ["id"], + EnableFileSearch = true, + VectorStoreId = "#vs", Metadata = new Dictionary() { { "a", "1" } }, + Temperature = 2, + TopP = 0, + ExecutionOptions = + new() + { + AdditionalInstructions = "test instructions", + MaxCompletionTokens = 1000, + MaxPromptTokens = 1000, + ParallelToolCallsEnabled = false, + TruncationMessageCount = 12, + }, + CodeInterpreterFileIds = ["file1"], EnableCodeInterpreter = true, - EnableRetrieval = true, + EnableJsonResponse = 
true, }; + // Assert Assert.Equal("testid", definition.Id); Assert.Equal("testname", definition.Name); Assert.Equal("testmodel", definition.ModelId); Assert.Equal("testinstructions", definition.Instructions); Assert.Equal("testdescription", definition.Description); + Assert.True(definition.EnableFileSearch); + Assert.Equal("#vs", definition.VectorStoreId); + Assert.Equal(2, definition.Temperature); + Assert.Equal(0, definition.TopP); + Assert.NotNull(definition.ExecutionOptions); + Assert.Equal("test instructions", definition.ExecutionOptions.AdditionalInstructions); + Assert.Equal(1000, definition.ExecutionOptions.MaxCompletionTokens); + Assert.Equal(1000, definition.ExecutionOptions.MaxPromptTokens); + Assert.Equal(12, definition.ExecutionOptions.TruncationMessageCount); + Assert.False(definition.ExecutionOptions.ParallelToolCallsEnabled); Assert.Single(definition.Metadata); - Assert.Single(definition.FileIds); + Assert.Single(definition.CodeInterpreterFileIds); Assert.True(definition.EnableCodeInterpreter); - Assert.True(definition.EnableRetrieval); + Assert.True(definition.EnableJsonResponse); + + // Act and Assert + ValidateSerialization(definition); + } + + /// + /// Verify TemplateFactoryFormat. + /// + [Fact] + public void VerifyOpenAIAssistantDefinitionTemplateFactoryFormat() + { + // Arrange + OpenAIAssistantDefinition definition = new("testmodel"); + + // Assert + Assert.Null(definition.TemplateFactoryFormat); + + // Act + definition = new("testmodel") + { + Metadata = new Dictionary() { { OpenAIAssistantAgent.TemplateMetadataKey, "testformat" } } + }; + + // Assert + Assert.Equal("testformat", definition.TemplateFactoryFormat); + } + + private static void ValidateSerialization(OpenAIAssistantDefinition source) + { + string json = JsonSerializer.Serialize(source); + + OpenAIAssistantDefinition? 
target = JsonSerializer.Deserialize(json); + + Assert.NotNull(target); + Assert.Equal(source.Id, target.Id); + Assert.Equal(source.Name, target.Name); + Assert.Equal(source.ModelId, target.ModelId); + Assert.Equal(source.Instructions, target.Instructions); + Assert.Equal(source.Description, target.Description); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + Assert.Equal(source.Temperature, target.Temperature); + Assert.Equal(source.TopP, target.TopP); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter); + Assert.Equal(source.ExecutionOptions?.MaxCompletionTokens, target.ExecutionOptions?.MaxCompletionTokens); + Assert.Equal(source.ExecutionOptions?.MaxPromptTokens, target.ExecutionOptions?.MaxPromptTokens); + Assert.Equal(source.ExecutionOptions?.TruncationMessageCount, target.ExecutionOptions?.TruncationMessageCount); + Assert.Equal(source.ExecutionOptions?.ParallelToolCallsEnabled, target.ExecutionOptions?.ParallelToolCallsEnabled); + AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds); + AssertCollection.Equal(source.Metadata, target.Metadata); } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs new file mode 100644 index 000000000000..a07690f42245 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs @@ -0,0 +1,103 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . 
+/// +public class OpenAIAssistantInvocationOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void OpenAIAssistantInvocationOptionsInitialState() + { + // Arrange + OpenAIAssistantInvocationOptions options = new(); + + // Assert + Assert.Null(options.ModelName); + Assert.Null(options.AdditionalInstructions); + Assert.Null(options.Metadata); + Assert.Null(options.Temperature); + Assert.Null(options.TopP); + Assert.Null(options.ParallelToolCallsEnabled); + Assert.Null(options.MaxCompletionTokens); + Assert.Null(options.MaxPromptTokens); + Assert.Null(options.TruncationMessageCount); + Assert.Null(options.EnableJsonResponse); + Assert.False(options.EnableCodeInterpreter); + Assert.False(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + /// + /// Verify initialization. + /// + [Fact] + public void OpenAIAssistantInvocationOptionsAssignment() + { + // Arrange + OpenAIAssistantInvocationOptions options = + new() + { + ModelName = "testmodel", + AdditionalInstructions = "test instructions", + Metadata = new Dictionary() { { "a", "1" } }, + MaxCompletionTokens = 1000, + MaxPromptTokens = 1000, + ParallelToolCallsEnabled = false, + TruncationMessageCount = 12, + Temperature = 2, + TopP = 0, + EnableCodeInterpreter = true, + EnableJsonResponse = true, + EnableFileSearch = true, + }; + + // Assert + Assert.Equal("testmodel", options.ModelName); + Assert.Equal("test instructions", options.AdditionalInstructions); + Assert.Equal(2, options.Temperature); + Assert.Equal(0, options.TopP); + Assert.Equal(1000, options.MaxCompletionTokens); + Assert.Equal(1000, options.MaxPromptTokens); + Assert.Equal(12, options.TruncationMessageCount); + Assert.False(options.ParallelToolCallsEnabled); + Assert.Single(options.Metadata); + Assert.True(options.EnableCodeInterpreter); + Assert.True(options.EnableJsonResponse); + Assert.True(options.EnableFileSearch); + + // Act and Assert + ValidateSerialization(options); + } + + private 
static void ValidateSerialization(OpenAIAssistantInvocationOptions source) + { + // Act + string json = JsonSerializer.Serialize(source); + + OpenAIAssistantInvocationOptions? target = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(target); + Assert.Equal(source.ModelName, target.ModelName); + Assert.Equal(source.Temperature, target.Temperature); + Assert.Equal(source.TopP, target.TopP); + Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens); + Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens); + Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount); + Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter); + Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse); + Assert.Equal(source.EnableFileSearch, target.EnableFileSearch); + AssertCollection.Equal(source.Metadata, target.Metadata); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs new file mode 100644 index 000000000000..7ae3cbaeacbe --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantResponseContent.cs @@ -0,0 +1,741 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Mock response payloads for . +/// +internal static class OpenAIAssistantResponseContent +{ + /// + /// Setup the response content for the . + /// + public static void SetupResponse(this HttpMessageHandlerStub messageHandlerStub, HttpStatusCode statusCode, string content) + { + messageHandlerStub.ResponseToReturn = + new HttpResponseMessage(statusCode) + { + Content = new StringContent(content) + }; + } + + /// + /// Setup the response content for the . 
+ /// + public static void SetupResponses(this HttpMessageHandlerStub messageHandlerStub, HttpStatusCode statusCode, params string[] content) + { + foreach (var item in content) + { +#pragma warning disable CA2000 // Dispose objects before losing scope + messageHandlerStub.ResponseQueue.Enqueue( + new(statusCode) + { + Content = new StringContent(item) + }); +#pragma warning restore CA2000 // Dispose objects before losing scope + } + } + + private const string AssistantId = "asst_abc123"; + private const string ThreadId = "thread_abc123"; + private const string RunId = "run_abc123"; + private const string MessageId = "msg_abc123"; + private const string StepId = "step_abc123"; + + #region Assistant + + /// + /// The response for creating or querying an assistant definition. + /// + public static string AssistantDefinition(OpenAIAssistantCapabilities capabilities, PromptTemplateConfig templateConfig) => + AssistantDefinition(templateConfig.Name, templateConfig.Template, templateConfig.Description, capabilities); + + /// + /// The response for creating or querying an assistant definition. + /// + public static string AssistantDefinition(OpenAIAssistantDefinition definition) => + AssistantDefinition(definition.Name, definition.Instructions, definition.Description, definition); + + /// + /// The response for creating or querying an assistant definition. + /// + public static string AssistantDefinition( + string? name, + string? instructions, + string? 
description, + OpenAIAssistantCapabilities capabilities) + { + StringBuilder builder = new(); + builder.AppendLine("{"); + builder.AppendLine(@$" ""id"": ""{AssistantId}"","); + builder.AppendLine(@" ""object"": ""assistant"","); + builder.AppendLine(@" ""created_at"": 1698984975,"); + builder.AppendLine(@$" ""name"": ""{name}"","); + builder.AppendLine(@$" ""description"": ""{description}"","); + builder.AppendLine(@$" ""instructions"": ""{instructions}"","); + builder.AppendLine(@$" ""model"": ""{capabilities.ModelId}"","); + + bool hasCodeInterpreter = capabilities.EnableCodeInterpreter; + bool hasCodeInterpreterFiles = (capabilities.CodeInterpreterFileIds?.Count ?? 0) > 0; + bool hasFileSearch = capabilities.EnableFileSearch; + if (!hasCodeInterpreter && !hasFileSearch) + { + builder.AppendLine(@" ""tools"": [],"); + } + else + { + builder.AppendLine(@" ""tools"": ["); + + if (hasCodeInterpreter) + { + builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}"); + } + + if (hasFileSearch) + { + builder.AppendLine(@" { ""type"": ""file_search"" }"); + } + + builder.AppendLine(" ],"); + } + + if (!hasCodeInterpreterFiles && !hasFileSearch) + { + builder.AppendLine(@" ""tool_resources"": {},"); + } + else + { + builder.AppendLine(@" ""tool_resources"": {"); + + if (hasCodeInterpreterFiles) + { + string fileIds = string.Join(",", capabilities.CodeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\"")); + builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? 
"," : string.Empty)}"); + } + + if (hasFileSearch) + { + builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{capabilities.VectorStoreId}""] }}"); + } + + builder.AppendLine(" },"); + } + + if (capabilities.Temperature.HasValue) + { + builder.AppendLine(@$" ""temperature"": {capabilities.Temperature},"); + } + + if (capabilities.TopP.HasValue) + { + builder.AppendLine(@$" ""top_p"": {capabilities.TopP},"); + } + + bool hasExecutionOptions = capabilities.ExecutionOptions != null; + int metadataCount = (capabilities.Metadata?.Count ?? 0); + if (metadataCount == 0 && !hasExecutionOptions) + { + builder.AppendLine(@" ""metadata"": {}"); + } + else + { + int index = 0; + builder.AppendLine(@" ""metadata"": {"); + + if (hasExecutionOptions) + { + string serializedExecutionOptions = JsonSerializer.Serialize(capabilities.ExecutionOptions); + builder.AppendLine(@$" ""{OpenAIAssistantAgent.OptionsMetadataKey}"": ""{JsonEncodedText.Encode(serializedExecutionOptions)}""{(metadataCount > 0 ? "," : string.Empty)}"); + } + + if (metadataCount > 0) + { + foreach (var (key, value) in capabilities.Metadata!) + { + builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? 
"," : string.Empty)}"); + ++index; + } + } + + builder.AppendLine(" }"); + } + + builder.AppendLine("}"); + + return builder.ToString(); + } + + public const string DeleteAgent = + $$$""" + { + "id": "{{{AssistantId}}}", + "object": "assistant.deleted", + "deleted": true + } + """; + + public const string CreateThread = + $$$""" + { + "id": "{{{ThreadId}}}", + "object": "thread", + "created_at": 1699012949, + "metadata": {} + } + """; + + public const string DeleteThread = + $$$""" + { + "id": "{{{ThreadId}}}", + "object": "thread.deleted", + "deleted": true + } + """; + + public const string ToolResponse = "{ }"; + + public const string GetImageMessage = + $$$""" + { + "id": "{{{MessageId}}}", + "object": "thread.message", + "created_at": 1699017614, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "image_file", + "image_file": { + "file_id": "file_123" + } + } + ], + "assistant_id": "{{{AssistantId}}}", + "run_id": "{{{RunId}}}" + } + """; + + public static string GetTextMessage(string text = "test") => + $$$""" + { + "id": "{{{MessageId}}}", + "object": "thread.message", + "created_at": 1699017614, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "{{{text}}}", + "annotations": [] + } + } + ], + "assistant_id": "{{{AssistantId}}}", + "run_id": "{{{RunId}}}" + } + """; + + public const string GetTextMessageWithAnnotation = + $$$""" + { + "id": "{{{MessageId}}}", + "object": "thread.message", + "created_at": 1699017614, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? 
Explain it in simple terms.**f1", + "annotations": [ + { + "type": "file_citation", + "text": "**f1", + "file_citation": { + "file_id": "file_123", + "quote": "does" + }, + "start_index": 3, + "end_index": 6 + } + ] + } + } + ], + "assistant_id": "{{{AssistantId}}}", + "run_id": "{{{RunId}}}" + } + """; + + public const string ListAgentsPageMore = + $$$""" + { + "object": "list", + "data": [ + { + "id": "{{{AssistantId}}}", + "object": "assistant", + "created_at": 1698982736, + "name": "Coding Tutor", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "metadata": {} + }, + { + "id": "asst_abc456", + "object": "assistant", + "created_at": 1698982718, + "name": "My Assistant", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "metadata": {} + }, + { + "id": "asst_abc789", + "object": "assistant", + "created_at": 1698982643, + "name": null, + "description": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [], + "metadata": {} + } + ], + "first_id": "{{{AssistantId}}}", + "last_id": "asst_abc789", + "has_more": true + } + """; + + public const string ListAgentsPageFinal = + """ + { + "object": "list", + "data": [ + { + "id": "asst_abc789", + "object": "assistant", + "created_at": 1698982736, + "name": "Coding Tutor", + "description": null, + "model": "gpt-4-turbo", + "instructions": "You are a helpful assistant designed to make me better at coding!", + "tools": [], + "metadata": {} + } + ], + "first_id": "asst_abc789", + "last_id": "asst_abc789", + "has_more": false + } + """; + + public const string ListMessagesPageMore = + $$$""" + { + "object": "list", + "data": [ + { + "id": "{{{MessageId}}}", + "object": "thread.message", + "created_at": 1699016383, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "text", + "text": 
{ + "value": "How does AI work? Explain it in simple terms.", + "annotations": [] + } + } + ], + "file_ids": [], + "assistant_id": null, + "run_id": null, + "metadata": {} + }, + { + "id": "msg_abc456", + "object": "thread.message", + "created_at": 1699016383, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "Hello, what is AI?", + "annotations": [] + } + } + ], + "file_ids": [ + "file-abc123" + ], + "assistant_id": null, + "run_id": null, + "metadata": {} + } + ], + "first_id": "{{{MessageId}}}", + "last_id": "msg_abc456", + "has_more": true + } + """; + + public const string ListMessagesPageFinal = + $$$""" + { + "object": "list", + "data": [ + { + "id": "msg_abc789", + "object": "thread.message", + "created_at": 1699016383, + "thread_id": "{{{ThreadId}}}", + "role": "user", + "content": [ + { + "type": "text", + "text": { + "value": "How does AI work? Explain it in simple terms.", + "annotations": [] + } + } + ], + "file_ids": [], + "assistant_id": null, + "run_id": null, + "metadata": {} + } + ], + "first_id": "msg_abc789", + "last_id": "msg_abc789", + "has_more": false + } + """; + + public static string UploadFile = + """ + { + "id": "file-abc123", + "object": "file", + "bytes": 120000, + "created_at": 1677610602, + "filename": "test.txt", + "purpose": "assistants" + } + """; + + #endregion + + /// + /// Response payloads for a "regular" assistant run. 
+ /// + public static class Run + { + public const string CreateRun = + $$$""" + { + "id": "{{{RunId}}}", + "object": "thread.run", + "created_at": 1699063290, + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "status": "queued", + "started_at": 1699063290, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699063291, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [], + "file_ids": [], + "metadata": {}, + "usage": null, + "temperature": 1 + } + """; + + public const string PendingRun = + $$$""" + { + "id": "{{{RunId}}}", + "object": "thread.run", + "created_at": 1699063290, + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "status": "requires_action", + "started_at": 1699063290, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699063291, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [], + "file_ids": [], + "metadata": {}, + "usage": null, + "temperature": 1 + } + """; + + public const string CompletedRun = + $$$""" + { + "id": "{{{RunId}}}", + "object": "thread.run", + "created_at": 1699063290, + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "status": "completed", + "started_at": 1699063290, + "expires_at": null, + "cancelled_at": null, + "failed_at": null, + "completed_at": 1699063291, + "last_error": null, + "model": "gpt-4-turbo", + "instructions": null, + "tools": [], + "file_ids": [], + "metadata": {}, + "usage": null, + "temperature": 1 + } + """; + + public const string MessageSteps = + $$$""" + { + "object": "list", + "data": [ + { + "id": "{{{StepId}}}", + "object": "thread.run.step", + "created_at": 1699063291, + "run_id": "{{{RunId}}}", + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "type": "message_creation", + "status": "completed", + "cancelled_at": null, + "completed_at": 1699063291, + "expired_at": null, + 
"failed_at": null, + "last_error": null, + "step_details": { + "type": "message_creation", + "message_creation": { + "message_id": "{{{MessageId}}}" + } + }, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + } + } + ], + "first_id": "{{{StepId}}}", + "last_id": "step_abc456", + "has_more": false + } + """; + + public const string ToolSteps = + $$$""" + { + "object": "list", + "data": [ + { + "id": "step_abc987", + "object": "thread.run.step", + "created_at": 1699063291, + "run_id": "{{{RunId}}}", + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "type": "tool_calls", + "status": "in_progress", + "cancelled_at": null, + "completed_at": 1699063291, + "expired_at": null, + "failed_at": null, + "last_error": null, + "step_details": { + "type": "tool_calls", + "tool_calls": [ + { + "id": "tool_1", + "type": "function", + "function": { + "name": "MyPlugin-MyFunction", + "arguments": "{ \"index\": 3 }", + "output": "test" + } + } + ] + }, + "usage": { + "prompt_tokens": 123, + "completion_tokens": 456, + "total_tokens": 579 + } + } + ], + "first_id": "{{{StepId}}}", + "last_id": "step_abc456", + "has_more": false + } + """; + } + + /// + /// Response payloads for a streaming assistant run. + /// + public static class Streaming + { + public static string Response(params string[] eventPayloads) + { + StringBuilder builder = new(); + + foreach (string payload in eventPayloads) + { + builder.Append(payload); + builder.AppendLine(); + builder.AppendLine(); + } + + return builder.ToString(); + } + + public const string Done = + """ + event: thread.done + data: [DONE] + """; + + public static string CreateRun(string eventType) + { + int? createdAt = null; + int? startedAt = null; + int? completedAt = null; + int? expiresAt = null; + string? 
status = null; + + switch (eventType) + { + case "created": + status = "created"; + createdAt = 1725978974; + break; + case "queued": + status = "queued"; + createdAt = 1725978974; + break; + case "in_progress": + status = "in_progress"; + createdAt = 1725978974; + startedAt = 1725978975; + expiresAt = 1725979576; + break; + case "completed": + status = "completed"; + createdAt = 1725978974; + startedAt = 1725978975; + expiresAt = 1725979576; + completedAt = 1725978976; + break; + } + + Assert.NotNull(status); + + return + CreateEvent( + $"thread.run.{eventType}", + $$$""" + { + "id": "{{{RunId}}}", + "object": "thread.run", + "assistant_id": "{{{AssistantId}}}", + "thread_id": "{{{ThreadId}}}", + "status": "{{{status}}}", + "created_at": {{{ParseTimestamp(createdAt)}}}, + "started_at": {{{ParseTimestamp(startedAt)}}}, + "expires_at": {{{ParseTimestamp(expiresAt)}}}, + "completed_at": {{{ParseTimestamp(completedAt)}}}, + "required_action": null, + "model": "gpt-4o-mini", + "instructions": "test", + "tools": [], + "metadata": {}, + "temperature": 1.0, + "top_p": 1.0, + "truncation_strategy": { "type": "auto" }, + "incomplete_details": null, + "usage": null, + "response_format": "auto", + "tool_choice": "auto", + "parallel_tool_calls": true + } + """); + } + + public static string DeltaMessage(string text) => + CreateEvent( + "thread.message.delta", + $$$""" + { + "id": "{{{MessageId}}}", + "object": "thread.message.delta", + "delta": { + "content": [ + { + "index": 0, + "type": "text", + "text": { "value": "{{{text}}}", "annotations": [] } + } + ] + } + } + """); + + private static string ParseTimestamp(int? 
timestamp) + { + if (timestamp.HasValue) + { + return timestamp.Value.ToString(); + } + + return "0"; + } + + private static string CreateEvent(string eventType, string data) => + $""" + event: {eventType} + data: {data.Replace("\n", string.Empty).Replace("\r", string.Empty)} + """; + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs new file mode 100644 index 000000000000..6217e1f38395 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs @@ -0,0 +1,136 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using System.ClientModel; +using System.Net.Http; +using Azure.Core; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Moq; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class OpenAIClientProviderTests +{ + /// + /// Verify that provisioning of client for Azure OpenAI. + /// + [Fact] + public void VerifyOpenAIClientProviderTargetAzureByKey() + { + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential("key"), new Uri("https://localhost")); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for Azure OpenAI. + /// + [Fact] + public void VerifyOpenAIClientProviderTargetAzureByCredential() + { + // Arrange + Mock mockCredential = new(); + + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(mockCredential.Object, new Uri("https://localhost")); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for OpenAI. + /// + [Theory] + [InlineData(null)] + [InlineData("http://myproxy:9819")] + public void VerifyOpenAIClientProviderTargetOpenAINoKey(string? endpoint) + { + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(endpoint != null ? 
new Uri(endpoint) : null); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that provisioning of client for OpenAI. + /// + [Theory] + [InlineData("key", null)] + [InlineData("key", "http://myproxy:9819")] + public void VerifyOpenAIClientProviderTargetOpenAIByKey(string key, string? endpoint) + { + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(key), endpoint != null ? new Uri(endpoint) : null); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that the factory can create a client with http proxy. + /// + [Fact] + public void VerifyOpenAIClientProviderWithHttpClient() + { + // Arrange + using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") }; + + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient); + + // Assert + Assert.NotNull(provider.Client); + + // Arrange + using HttpClient httpClientWithHeaders = new() { BaseAddress = new Uri("http://myproxy:9819") }; + httpClient.DefaultRequestHeaders.Add("X-Test", "Test"); + + // Act + OpenAIClientProvider providerWithHeaders = OpenAIClientProvider.ForOpenAI(httpClient: httpClient); + + // Assert + Assert.NotNull(providerWithHeaders.Client); + + Assert.NotEqual(provider.ConfigurationKeys.Count, providerWithHeaders.ConfigurationKeys.Count); + } + + /// + /// Verify that the factory can create a client with http proxy. + /// + [Fact] + public void VerifyOpenAIClientProviderWithHttpClientHeaders() + { + // Arrange + using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") }; + httpClient.DefaultRequestHeaders.Add("X-Test", "Test"); + + // Act + OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient); + + // Assert + Assert.NotNull(provider.Client); + } + + /// + /// Verify that the factory can accept an client that already exists. 
+ /// + [Fact] + public void VerifyOpenAIClientProviderFromClient() + { + // Arrange + Mock mockClient = new(); + OpenAIClientProvider provider = OpenAIClientProvider.FromClient(mockClient.Object); + + // Assert + Assert.NotNull(provider.Client); + Assert.Equal(mockClient.Object, provider.Client); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs new file mode 100644 index 000000000000..1689bec1f828 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs @@ -0,0 +1,75 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class OpenAIThreadCreationOptionsTests +{ + /// + /// Verify initial state. + /// + [Fact] + public void OpenAIThreadCreationOptionsInitialState() + { + // Arrange + OpenAIThreadCreationOptions options = new(); + + // Assert + Assert.Null(options.Messages); + Assert.Null(options.Metadata); + Assert.Null(options.VectorStoreId); + Assert.Null(options.CodeInterpreterFileIds); + + // Act and Assert + ValidateSerialization(options); + } + + /// + /// Verify initialization. 
+ /// + [Fact] + public void OpenAIThreadCreationOptionsAssignment() + { + // Arrange + OpenAIThreadCreationOptions options = + new() + { + Messages = [new ChatMessageContent(AuthorRole.User, "test")], + VectorStoreId = "#vs", + Metadata = new Dictionary() { { "a", "1" } }, + CodeInterpreterFileIds = ["file1"], + }; + + // Assert + Assert.Single(options.Messages); + Assert.Single(options.Metadata); + Assert.Equal("#vs", options.VectorStoreId); + Assert.Single(options.CodeInterpreterFileIds); + + // Act and Assert + ValidateSerialization(options); + } + + private static void ValidateSerialization(OpenAIThreadCreationOptions source) + { + // Act + string json = JsonSerializer.Serialize(source); + + OpenAIThreadCreationOptions? target = JsonSerializer.Deserialize(json); + + // Assert + Assert.NotNull(target); + Assert.Equal(source.VectorStoreId, target.VectorStoreId); + AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds); + AssertCollection.Equal(source.Messages, target.Messages, m => m.Items.Count); // ChatMessageContent already validated for deep serialization + AssertCollection.Equal(source.Metadata, target.Metadata); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs new file mode 100644 index 000000000000..e75a962dfc5e --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs @@ -0,0 +1,71 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Unit testing of . +/// +public class RunPollingOptionsTests +{ + /// + /// Verify initial state. 
+ /// + [Fact] + public void RunPollingOptionsInitialStateTest() + { + // Arrange + RunPollingOptions options = new(); + + // Assert + Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval); + Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff); + Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay); + Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold); + } + + /// s + /// Verify initialization. + /// + [Fact] + public void RunPollingOptionsAssignmentTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + MessageSynchronizationDelay = TimeSpan.FromSeconds(5), + }; + + // Assert + Assert.Equal(3, options.RunPollingInterval.TotalSeconds); + Assert.Equal(4, options.RunPollingBackoff.TotalSeconds); + Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds); + Assert.Equal(8, options.RunPollingBackoffThreshold); + } + + /// s + /// Verify initialization. 
+ /// + [Fact] + public void RunPollingOptionsGetIntervalTest() + { + // Arrange + RunPollingOptions options = + new() + { + RunPollingInterval = TimeSpan.FromSeconds(3), + RunPollingBackoff = TimeSpan.FromSeconds(4), + RunPollingBackoffThreshold = 8, + }; + + // Assert + Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8)); + Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Connectors.AzureAIInference.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Connectors.AzureAIInference.UnitTests.csproj new file mode 100644 index 000000000000..acf3f919710f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Connectors.AzureAIInference.UnitTests.csproj @@ -0,0 +1,48 @@ +๏ปฟ + + + SemanticKernel.Connectors.AzureAIInference.UnitTests + $(AssemblyName) + net8.0 + true + enable + disable + false + $(NoWarn);CA2007,CA1806,CS1591,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0070 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Core/ChatClientCoreTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Core/ChatClientCoreTests.cs new file mode 100644 index 000000000000..d844ac784ba9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Core/ChatClientCoreTests.cs @@ -0,0 +1,184 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.AI.Inference; +using Azure.Core; +using Azure.Core.Pipeline; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Connectors.AzureAIInference.Core; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAIInference.UnitTests.Core; + +public sealed class ChatClientCoreTests +{ + private readonly Uri _endpoint = new("http://localhost"); + + [Fact] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected() + { + // Arrange + var logger = new Mock>().Object; + var breakingGlassClient = new ChatCompletionsClient(this._endpoint, new AzureKeyCredential("key")); + + // Act + var clientCoreModelConstructor = new ChatClientCore("model1", "apiKey", this._endpoint); + var clientCoreBreakingGlassConstructor = new ChatClientCore("model1", breakingGlassClient, logger: logger); + + // Assert + Assert.Equal("model1", clientCoreModelConstructor.ModelId); + Assert.Equal("model1", clientCoreBreakingGlassConstructor.ModelId); + + Assert.NotNull(clientCoreModelConstructor.Client); + Assert.NotNull(clientCoreBreakingGlassConstructor.Client); + Assert.Equal(breakingGlassClient, clientCoreBreakingGlassConstructor.Client); + Assert.Equal(NullLogger.Instance, clientCoreModelConstructor.Logger); + Assert.Equal(logger, clientCoreBreakingGlassConstructor.Logger); + } + + [Theory] + [InlineData("http://localhost", null)] + [InlineData(null, "http://localhost")] + [InlineData("http://localhost-1", "http://localhost-2")] + public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? providedEndpoint) + { + // Arrange + Uri? endpoint = null; + HttpClient? 
client = null; + if (providedEndpoint is not null) + { + endpoint = new Uri(providedEndpoint); + } + + if (clientBaseAddress is not null) + { + client = new HttpClient { BaseAddress = new Uri(clientBaseAddress) }; + } + + // Act + var clientCore = new ChatClientCore("model", "apiKey", endpoint: endpoint, httpClient: client); + + // Assert + Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/v1"), clientCore.Endpoint); + + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.EndpointKey)); + Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString(), clientCore.Attributes[AIServiceExtensions.EndpointKey]); + + client?.Dispose(); + } + + [Fact] + public void ItThrowsIfNoEndpointOptionIsProvided() + { + // Act & Assert + Assert.Throws(() => new ChatClientCore("model", "apiKey", endpoint: null, httpClient: null)); + } + + [Fact] + public async Task ItAddSemanticKernelHeadersOnEachRequestAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new(); + using HttpClient httpClient = new(handler); + httpClient.BaseAddress = this._endpoint; + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + var clientCore = new ChatClientCore(modelId: "model", apiKey: "test", httpClient: httpClient); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = RequestMethod.Post; + pipelineMessage.Request.Uri = new RequestUriBuilder() { Host = "localhost", Scheme = "https" }; + pipelineMessage.Request.Content = RequestContent.Create(new BinaryData("test")); + + // Act + await clientCore.Client.Pipeline.SendAsync(pipelineMessage, CancellationToken.None); + + // Assert + Assert.True(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.Equal(HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ChatClientCore)), handler.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).FirstOrDefault()); + + 
Assert.True(handler.RequestHeaders.Contains("User-Agent")); + Assert.Contains(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Fact] + public async Task ItDoesNotAddSemanticKernelHeadersWhenBreakingGlassClientIsProvidedAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new(); + using HttpClient httpClient = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + var clientCore = new ChatClientCore( + modelId: "model", + chatClient: new ChatCompletionsClient(this._endpoint, new AzureKeyCredential("api-key"), + new ChatCompletionsClientOptions() + { + Transport = new HttpClientTransport(httpClient), + RetryPolicy = new RetryPolicy(maxRetries: 0), // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. + Retry = { NetworkTimeout = Timeout.InfiniteTimeSpan } // Disable Azure SDK default timeout + })); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = RequestMethod.Post; + pipelineMessage.Request.Uri = new RequestUriBuilder { Scheme = "http", Host = "http://localhost" }; + pipelineMessage.Request.Content = RequestContent.Create(new BinaryData("test")); + + // Act + await clientCore.Client.Pipeline.SendAsync(pipelineMessage, CancellationToken.None); + + // Assert + Assert.False(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.DoesNotContain(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData("value")] + public void ItAddsAttributesButDoesNothingIfNullOrEmpty(string? 
value) + { + // Arrange + var clientCore = new ChatClientCore("model", "api-key", this._endpoint); + + // Act + clientCore.AddAttribute("key", value); + + // Assert + if (string.IsNullOrEmpty(value)) + { + Assert.False(clientCore.Attributes.ContainsKey("key")); + } + else + { + Assert.True(clientCore.Attributes.ContainsKey("key")); + Assert.Equal(value, clientCore.Attributes["key"]); + } + } + + [Fact] + public void ItAddsModelIdAttributeAsExpected() + { + // Arrange + var expectedModelId = "modelId"; + + // Act + var clientCore = new ChatClientCore(expectedModelId, "api-key", this._endpoint); + var clientCoreBreakingGlass = new ChatClientCore(expectedModelId, new ChatCompletionsClient(this._endpoint, new AzureKeyCredential(" "))); + + // Assert + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.True(clientCoreBreakingGlass.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.Equal(expectedModelId, clientCore.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal(expectedModelId, clientCoreBreakingGlass.Attributes[AIServiceExtensions.ModelIdKey]); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..8d5b31548b5f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceKernelBuilderExtensionsTests.cs @@ -0,0 +1,49 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using Azure; +using Azure.AI.Inference; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAIInference.UnitTests.Extensions; +public sealed class AzureAIInferenceKernelBuilderExtensionsTests +{ + private readonly Uri _endpoint = new("https://endpoint"); + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.BreakingGlassClientInline)] + [InlineData(InitializationType.BreakingGlassInServiceProvider)] + public void KernelBuilderAddAzureAIInferenceChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var client = new ChatCompletionsClient(this._endpoint, new AzureKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureAIInferenceChatCompletion("model-id", "api-key", this._endpoint), + InitializationType.BreakingGlassClientInline => builder.AddAzureAIInferenceChatCompletion("model-id", client), + InitializationType.BreakingGlassInServiceProvider => builder.AddAzureAIInferenceChatCompletion("model-id", chatClient: null), + _ => builder + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureAIInferenceChatCompletionService); + } + + public enum InitializationType + { + ApiKey, + BreakingGlassClientInline, + BreakingGlassInServiceProvider, + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..02b26f12921b --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Extensions/AzureAIInferenceServiceCollectionExtensionsTests.cs @@ -0,0 +1,50 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Azure; +using Azure.AI.Inference; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAIInference.UnitTests.Extensions; + +public sealed class AzureAIInferenceServiceCollectionExtensionsTests +{ + private readonly Uri _endpoint = new("https://endpoint"); + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ItCanAddChatCompletionService(InitializationType type) + { + // Arrange + var client = new ChatCompletionsClient(this._endpoint, new AzureKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureAIInferenceChatCompletion("modelId", "api-key", this._endpoint), + InitializationType.ClientInline => builder.Services.AddAzureAIInferenceChatCompletion("modelId", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureAIInferenceChatCompletion("modelId", chatClient: null), + _ => builder.Services + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureAIInferenceChatCompletionService); + } + + public enum InitializationType + { + ApiKey, + ClientInline, + ClientInServiceProvider, + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs 
b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs new file mode 100644 index 000000000000..44bd2c006661 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Services/AzureAIInferenceChatCompletionServiceTests.cs @@ -0,0 +1,280 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure; +using Azure.AI.Inference; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAIInference.UnitTests.Services; + +/// +/// Tests for the class. +/// +public sealed class AzureAIInferenceChatCompletionServiceTests : IDisposable +{ + private readonly Uri _endpoint = new("https://localhost:1234"); + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly MultipleHttpMessageHandlerStub _multiMessageHandlerStub; + private readonly HttpClient _httpClient; + private readonly HttpClient _httpClientWithBaseAddress; + private readonly AzureAIInferencePromptExecutionSettings _executionSettings; + private readonly Mock _mockLoggerFactory; + private readonly ChatHistory _chatHistoryForTest = [new ChatMessageContent(AuthorRole.User, "test")]; + + public AzureAIInferenceChatCompletionServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._multiMessageHandlerStub = new MultipleHttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._httpClientWithBaseAddress = new HttpClient(this._messageHandlerStub, false) { BaseAddress = this._endpoint }; + this._mockLoggerFactory = new Mock(); + this._executionSettings = new AzureAIInferencePromptExecutionSettings(); 
+ } + + /// + /// Checks that the constructors work as expected. + /// + [Fact] + public void ConstructorsWorksAsExpected() + { + // Arrange + using var httpClient = new HttpClient() { BaseAddress = this._endpoint }; + var loggerFactoryMock = new Mock(); + ChatCompletionsClient client = new(this._endpoint, new AzureKeyCredential("api-key")); + + // Act & Assert + // Endpoint constructor + new AzureAIInferenceChatCompletionService(endpoint: this._endpoint, apiKey: null); // Only the endpoint + new AzureAIInferenceChatCompletionService(httpClient: httpClient, apiKey: null); // Only the HttpClient with a BaseClass defined + new AzureAIInferenceChatCompletionService(modelId: "model", endpoint: this._endpoint, apiKey: null); // ModelId and endpoint + new AzureAIInferenceChatCompletionService(modelId: "model", apiKey: "api-key", endpoint: this._endpoint); // ModelId, apiKey, and endpoint + new AzureAIInferenceChatCompletionService(endpoint: this._endpoint, apiKey: null, loggerFactory: loggerFactoryMock.Object); // Endpoint and loggerFactory + + // Breaking Glass constructor + new AzureAIInferenceChatCompletionService(modelId: null, chatClient: client); // Client without model + new AzureAIInferenceChatCompletionService(modelId: "model", chatClient: client); // Client + new AzureAIInferenceChatCompletionService(modelId: "model", chatClient: client, loggerFactory: loggerFactoryMock.Object); // Client + } + + [Theory] + [InlineData("http://localhost:1234/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234/v2/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234")] + [InlineData("http://localhost:8080")] + [InlineData("https://something:8080")] // Accepts TLS Secured endpoints + [InlineData("http://localhost:1234/v2")] + [InlineData("http://localhost:8080/v2")] + public async Task ItUsesCustomEndpointsWhenProvidedDirectlyAsync(string endpoint) + { + // Arrange + var chatCompletion = new 
AzureAIInferenceChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpoint)); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = this.CreateDefaultStringContent() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.StartsWith($"{endpoint}/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); + } + + [Theory] + [InlineData("http://localhost:1234/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234/v2/chat/completions")] // Uses full path when provided + [InlineData("http://localhost:1234")] + [InlineData("http://localhost:8080")] + [InlineData("https://something:8080")] // Accepts TLS Secured endpoints + [InlineData("http://localhost:1234/v2")] + [InlineData("http://localhost:8080/v2")] + public async Task ItPrioritizesCustomEndpointOverHttpClientBaseAddressAsync(string endpoint) + { + // Arrange + this._httpClient.BaseAddress = new Uri("http://should-be-overridden"); + var chatCompletion = new AzureAIInferenceChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpoint)); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = this.CreateDefaultStringContent() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.StartsWith($"{endpoint}/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); + } + + [Fact] + public async Task ItUsesHttpClientBaseAddressWhenNoEndpointIsProvidedAsync() + { + // Arrange + this._httpClient.BaseAddress = this._endpoint; + var chatCompletion = new AzureAIInferenceChatCompletionService(modelId: "any", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new 
HttpResponseMessage(HttpStatusCode.OK) + { Content = this.CreateDefaultStringContent() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.StartsWith(this._endpoint.ToString(), this._messageHandlerStub.RequestUri?.ToString()); + } + + [Fact] + public void ItThrowsIfNoEndpointOrNoHttpClientBaseAddressIsProvided() + { + // Act & Assert + Assert.Throws(() => new AzureAIInferenceChatCompletionService(endpoint: null, httpClient: this._httpClient)); + } + + [Fact] + public async Task ItGetChatMessageContentsShouldHaveModelIdDefinedAsync() + { + // Arrange + var chatCompletion = new AzureAIInferenceChatCompletionService(apiKey: "NOKEY", httpClient: this._httpClientWithBaseAddress); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = this.CreateDefaultStringContent() }; + + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, this._executionSettings); + + // Assert + Assert.NotNull(chatMessage.ModelId); + Assert.Equal("phi3-medium-4k", chatMessage.ModelId); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureAIInferenceChatCompletionService(httpClient: this._httpClientWithBaseAddress); + await using var stream = File.OpenRead("TestData/chat_completion_streaming_response.txt"); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + Assert.Equal(AuthorRole.Assistant, enumerator.Current.Role); + + await enumerator.MoveNextAsync(); + Assert.Equal("Test content", enumerator.Current.Content); + 
Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + } + + [Fact] + public async Task GetChatMessageContentsWithChatMessageContentItemCollectionCorrectlyAsync() + { + // Arrange + const string Prompt = "This is test prompt"; + const string AssistantMessage = "This is assistant message"; + const string CollectionItemPrompt = "This is collection item prompt"; + var chatCompletion = new AzureAIInferenceChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClientWithBaseAddress); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = this.CreateDefaultStringContent() }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(Prompt); + chatHistory.AddAssistantMessage(AssistantMessage); + chatHistory.AddUserMessage( + [ + new TextContent(CollectionItemPrompt), + new ImageContent(new Uri("https://image")) + ]); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + + Assert.Equal(3, messages.GetArrayLength()); + + Assert.Equal(Prompt, messages[0].GetProperty("content").GetString()); + Assert.Equal("user", messages[0].GetProperty("role").GetString()); + + Assert.Equal(AssistantMessage, messages[1].GetProperty("content").GetString()); + Assert.Equal("assistant", messages[1].GetProperty("role").GetString()); + + var contentItems = messages[2].GetProperty("content"); + Assert.Equal(2, contentItems.GetArrayLength()); + Assert.Equal(CollectionItemPrompt, contentItems[0].GetProperty("text").GetString()); + Assert.Equal("text", contentItems[0].GetProperty("type").GetString()); + Assert.Equal("https://image/", 
contentItems[1].GetProperty("image_url").GetProperty("url").GetString()); + Assert.Equal("image_url", contentItems[1].GetProperty("type").GetString()); + } + + [Theory] + [InlineData("string", "json_object")] + [InlineData("string", "text")] + [InlineData("string", "random")] + [InlineData("JsonElement.String", "\"json_object\"")] + [InlineData("JsonElement.String", "\"text\"")] + [InlineData("JsonElement.String", "\"random\"")] + [InlineData("ChatResponseFormat", "json_object")] + [InlineData("ChatResponseFormat", "text")] + public async Task GetChatMessageInResponseFormatsAsync(string formatType, string formatValue) + { + // Arrange + object? format = null; + switch (formatType) + { + case "string": + format = formatValue; + break; + case "JsonElement.String": + format = JsonSerializer.Deserialize(formatValue); + break; + case "ChatResponseFormat": + format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJSON(); + break; + } + + var sut = new AzureAIInferenceChatCompletionService(httpClient: this._httpClientWithBaseAddress); + AzureAIInferencePromptExecutionSettings executionSettings = new() { ResponseFormat = format }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_response.json")) + }; + + // Act + var result = await sut.GetChatMessageContentAsync(this._chatHistoryForTest, executionSettings); + + // Assert + Assert.NotNull(result); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._httpClientWithBaseAddress.Dispose(); + this._messageHandlerStub.Dispose(); + this._multiMessageHandlerStub.Dispose(); + } + + private StringContent CreateDefaultStringContent() + { + return new StringContent(File.ReadAllText("TestData/chat_completion_response.json")); + } +} diff --git 
a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Settings/AzureAIInferencePromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Settings/AzureAIInferencePromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..c61a261e7d30 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/Settings/AzureAIInferencePromptExecutionSettingsTests.cs @@ -0,0 +1,240 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAIInference.UnitTests.Settings; +public sealed class AzureAIInferencePromptExecutionSettingsTests +{ + [Fact] + public void ItCreatesAzureAIInferenceExecutionSettingsWithCorrectDefaults() + { + // Arrange + // Act + AzureAIInferencePromptExecutionSettings executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(null); + + // Assert + Assert.NotNull(executionSettings); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.FrequencyPenalty); + Assert.Null(executionSettings.PresencePenalty); + Assert.Null(executionSettings.NucleusSamplingFactor); + Assert.Null(executionSettings.ResponseFormat); + Assert.Null(executionSettings.Seed); + Assert.Null(executionSettings.MaxTokens); + Assert.Empty(executionSettings.ExtensionData!); + Assert.Empty(executionSettings.Tools); + Assert.Empty(executionSettings.StopSequences!); + } + + [Fact] + public void ItUsesExistingAzureAIInferenceExecutionSettings() + { + // Arrange + AzureAIInferencePromptExecutionSettings actualSettings = new() + { + Temperature = 0.7f, + NucleusSamplingFactor = 0.7f, + FrequencyPenalty = 0.7f, + PresencePenalty = 0.7f, + StopSequences = ["foo", "bar"], + MaxTokens = 128 + }; + + // Act + AzureAIInferencePromptExecutionSettings 
executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(actualSettings, executionSettings); + Assert.Equal(128, executionSettings.MaxTokens); + } + + [Fact] + public void ItCanUseAzureAIInferenceExecutionSettings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() { + { "max_tokens", 1000 }, + { "temperature", 0 } + } + }; + + // Act + AzureAIInferencePromptExecutionSettings executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(1000, executionSettings.MaxTokens); + Assert.Equal(0, executionSettings.Temperature); + } + + [Fact] + public void ItCreatesAzureAIInferenceExecutionSettingsFromExtraPropertiesSnakeCase() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", 0.7 }, + { "frequency_penalty", 0.7 }, + { "presence_penalty", 0.7 }, + { "stop", new [] { "foo", "bar" } }, + { "max_tokens", 128 }, + { "seed", 123456 }, + } + }; + + // Act + AzureAIInferencePromptExecutionSettings executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesAzureAIInferenceExecutionSettingsFromExtraPropertiesAsStrings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", "0.7" }, + { "frequency_penalty", "0.7" }, + { "presence_penalty", "0.7" }, + { "stop", new [] { "foo", "bar" } }, + { "max_tokens", "128" }, + { "response_format", "json" }, + { "seed", 123456 }, + } + }; + + // Act + AzureAIInferencePromptExecutionSettings executionSettings = 
AzureAIInferencePromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesAzureAIInferenceExecutionSettingsFromJsonSnakeCase() + { + // Arrange + var json = """ + { + "temperature": 0.7, + "top_p": 0.7, + "frequency_penalty": 0.7, + "presence_penalty": 0.7, + "stop": [ "foo", "bar" ], + "max_tokens": 128, + "response_format": "text", + "seed": 123456 + } + """; + var actualSettings = JsonSerializer.Deserialize(json); + + // Act + AzureAIInferencePromptExecutionSettings executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void PromptExecutionSettingsCloneWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Act + var clone = executionSettings!.Clone(); + + // Assert + Assert.NotNull(clone); + Assert.Equal(executionSettings.ModelId, clone.ModelId); + Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + } + + [Fact] + public void PromptExecutionSettingsFreezeWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "response_format": "json", + "stop": [ "DONE" ] + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload)!; + executionSettings.ExtensionData = new Dictionary() { { "new", 5 } }; + + // Act + executionSettings!.Freeze(); + + // Assert + Assert.True(executionSettings.IsFrozen); + Assert.Throws(() => executionSettings.ModelId = "new-model"); + Assert.Throws(() => executionSettings.Temperature = 1); + Assert.Throws(() => executionSettings.FrequencyPenalty = 
1); + Assert.Throws(() => executionSettings.PresencePenalty = 1); + Assert.Throws(() => executionSettings.NucleusSamplingFactor = 1); + Assert.Throws(() => executionSettings.MaxTokens = 100); + Assert.Throws(() => executionSettings.ResponseFormat = "text"); + Assert.Throws(() => executionSettings.StopSequences?.Add("STOP")); + Assert.Throws(() => executionSettings.ExtensionData["new"] = 6); + + executionSettings!.Freeze(); // idempotent + Assert.True(executionSettings.IsFrozen); + } + + [Fact] + public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences() + { + // Arrange + PromptExecutionSettings settings = new AzureAIInferencePromptExecutionSettings { StopSequences = [] }; + + // Act + var executionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(settings); + + // Assert + Assert.NotNull(executionSettings.StopSequences); + Assert.Empty(executionSettings.StopSequences); + } + + private static void AssertExecutionSettings(AzureAIInferencePromptExecutionSettings executionSettings) + { + Assert.NotNull(executionSettings); + Assert.Equal(0.7f, executionSettings.Temperature); + Assert.Equal(0.7f, executionSettings.NucleusSamplingFactor); + Assert.Equal(0.7f, executionSettings.FrequencyPenalty); + Assert.Equal(0.7f, executionSettings.PresencePenalty); + Assert.Equal(["foo", "bar"], executionSettings.StopSequences); + Assert.Equal(128, executionSettings.MaxTokens); + Assert.Equal(123456, executionSettings.Seed); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_response.json b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_response.json new file mode 100644 index 000000000000..c4b1198108fc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_response.json @@ -0,0 +1,22 @@ +{ + "id": "chat-00078bf2c54346c6bfa31e561462c381", + "object": "chat.completion", + "created": 1723641172, + "model": 
"phi3-medium-4k", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Test response", + "tool_calls": [] + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 17, + "total_tokens": 148, + "completion_tokens": 131 + } +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_streaming_response.txt b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_streaming_response.txt new file mode 100644 index 000000000000..d3ef93e3b439 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference.UnitTests/TestData/chat_completion_streaming_response.txt @@ -0,0 +1,7 @@ +data: {"id":"chat-6035afe96714485eb0998fe041bfdbdb","object":"chat.completion.chunk","created":1723641572,"model":"phi3-medium-4k","choices":[{"index":0,"delta":{"role":"assistant"},"logprobs":null,"finish_reason":null}],"usage":{"prompt_tokens":17,"total_tokens":17,"completion_tokens":0}} + +data: {"id":"chat-6035afe96714485eb0998fe041bfdbdb","object":"chat.completion.chunk","created":1723641572,"model":"phi3-medium-4k","choices":[{"index":0,"delta":{"content":"Test content"},"logprobs":null,"finish_reason":"stop","stop_reason":32007}],"usage":{"prompt_tokens":17,"total_tokens":106,"completion_tokens":89}} + +data: {"id":"chat-6035afe96714485eb0998fe041bfdbdb","object":"chat.completion.chunk","created":1723641572,"model":"phi3-medium-4k","choices":[],"usage":{"prompt_tokens":17,"total_tokens":106,"completion_tokens":89}} + +data: [DONE] \ No newline at end of file diff --git a/dotnet/src/Experimental/Agents/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/AssemblyInfo.cs similarity index 78% rename from dotnet/src/Experimental/Agents/AssemblyInfo.cs rename to dotnet/src/Connectors/Connectors.AzureAIInference/AssemblyInfo.cs index 951ee2d58289..fe66371dbc58 100644 --- a/dotnet/src/Experimental/Agents/AssemblyInfo.cs +++ 
b/dotnet/src/Connectors/Connectors.AzureAIInference/AssemblyInfo.cs @@ -3,4 +3,4 @@ using System.Diagnostics.CodeAnalysis; // This assembly is currently experimental. -[assembly: Experimental("SKEXP0101")] +[assembly: Experimental("SKEXP0070")] diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Connectors.AzureAIInference.csproj b/dotnet/src/Connectors/Connectors.AzureAIInference/Connectors.AzureAIInference.csproj new file mode 100644 index 000000000000..2f87b005fda1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Connectors.AzureAIInference.csproj @@ -0,0 +1,34 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Connectors.AzureAIInference + $(AssemblyName) + net8.0;netstandard2.0 + $(NoWarn);NU5104;SKEXP0001,SKEXP0070 + false + beta + + + + + + + + + Semantic Kernel - Azure AI Inference connectors + Semantic Kernel Model as a Service connectors for Azure AI Studio. Contains clients for chat completion, embeddings and text to image generation. + + + + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/AddHeaderRequestPolicy.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs rename to dotnet/src/Connectors/Connectors.AzureAIInference/Core/AddHeaderRequestPolicy.cs index 89ecb3bef22b..f263e8dc1a27 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AddHeaderRequestPolicy.cs +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/AddHeaderRequestPolicy.cs @@ -3,7 +3,7 @@ using Azure.Core; using Azure.Core.Pipeline; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureAIInference.Core; /// /// Helper class to inject headers into Azure SDK HTTP pipeline diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Core/ChatClientCore.cs 
b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/ChatClientCore.cs new file mode 100644 index 000000000000..047bda6cabc8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/ChatClientCore.cs @@ -0,0 +1,649 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.AI.Inference; +using Azure.Core; +using Azure.Core.Pipeline; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.AzureAIInference.Core; + +/// +/// Base class for AI clients that provides common functionality for interacting with Azure AI Inference services. +/// +internal sealed class ChatClientCore +{ + /// + /// Non-default endpoint for Azure AI Inference API. + /// + internal Uri? Endpoint { get; init; } + + /// + /// Non-default endpoint for Azure AI Inference API. + /// + internal string? ModelId { get; init; } + + /// + /// Logger instance + /// + internal ILogger Logger { get; init; } + + /// + /// Azure AI Inference Client + /// + internal ChatCompletionsClient Client { get; set; } + + /// + /// Storage for AI service attributes. + /// + internal Dictionary Attributes { get; } = []; + + /// + /// Initializes a new instance of the class. + /// + /// Optional target Model Id for endpoints that support multiple models + /// Azure AI Inference API Key. + /// Azure AI Inference compatible API endpoint. + /// Custom for HTTP requests. 
+ /// The to use for logging. If null, no logging will be performed. + internal ChatClientCore( + string? modelId = null, + string? apiKey = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILogger? logger = null) + { + this.Logger = logger ?? NullLogger.Instance; + // Accepts the endpoint if provided, otherwise uses the default Azure AI Inference endpoint. + this.Endpoint = endpoint ?? httpClient?.BaseAddress; + Verify.NotNull(this.Endpoint, "endpoint or base-address"); + this.AddAttribute(AIServiceExtensions.EndpointKey, this.Endpoint.ToString()); + + if (string.IsNullOrEmpty(apiKey)) + { + // Api Key is not required, when not provided will be set to single space to avoid empty exceptions from Azure SDK AzureKeyCredential type. + // This is a common scenario when using the Azure AI Inference service thru a Gateway that may inject the API Key. + apiKey = SingleSpace; + } + + if (!string.IsNullOrEmpty(modelId)) + { + this.ModelId = modelId; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + this.Client = new ChatCompletionsClient(this.Endpoint, new AzureKeyCredential(apiKey!), GetClientOptions(httpClient)); + } + + /// + /// Initializes a new instance of the class. + /// + /// Optional target Model Id for endpoints that support multiple models + /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure AI Inference compatible API endpoint. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal ChatClientCore( + string? modelId = null, + TokenCredential? credential = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILogger? logger = null) + { + Verify.NotNull(endpoint); + Verify.NotNull(credential); + this.Logger = logger ?? NullLogger.Instance; + + this.Endpoint = endpoint ?? 
httpClient?.BaseAddress; + Verify.NotNull(this.Endpoint, "endpoint or base-address"); + this.AddAttribute(AIServiceExtensions.EndpointKey, this.Endpoint.ToString()); + + if (!string.IsNullOrEmpty(modelId)) + { + this.ModelId = modelId; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + this.Client = new ChatCompletionsClient(this.Endpoint, credential, GetClientOptions(httpClient)); + } + + /// + /// Initializes a new instance of the class using the specified Azure AI Inference Client. + /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// Target Model Id for endpoints supporting more than one + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal ChatClientCore( + string? modelId, + ChatCompletionsClient chatClient, + ILogger? logger = null) + { + Verify.NotNull(chatClient); + if (!string.IsNullOrEmpty(modelId)) + { + this.ModelId = modelId; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + this.Logger = logger ?? NullLogger.Instance; + this.Client = chatClient; + } + + /// + /// Allows adding attributes to the client. + /// + /// Attribute key. + /// Attribute value. + internal void AddAttribute(string key, string? value) + { + if (!string.IsNullOrEmpty(value)) + { + this.Attributes.Add(key, value); + } + } + + /// + /// Get chat multiple chat content choices for the prompt and settings. + /// + /// + /// This should be used when the settings request for more than one choice. + /// + /// The chat history context. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// List of different chat results generated by the remote model + internal async Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? 
executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(chatHistory); + + // Convert the incoming execution settings to specialized settings. + AzureAIInferencePromptExecutionSettings chatExecutionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + // Create the SDK ChatCompletionOptions instance from all available information. + ChatCompletionsOptions chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chatHistory, kernel, this.ModelId); + + // Make the request. + ChatCompletions? responseData = null; + var extraParameters = chatExecutionSettings.ExtraParameters; + + List responseContent; + using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.ModelId ?? string.Empty, ModelProvider, chatHistory, chatExecutionSettings)) + { + try + { + responseData = (await RunRequestAsync(() => this.Client!.CompleteAsync(chatOptions, chatExecutionSettings.ExtraParameters ?? string.Empty, cancellationToken)).ConfigureAwait(false)).Value; + + this.LogUsage(responseData.Usage); + if (responseData.Choices.Count == 0) + { + throw new KernelException("Chat completions not found"); + } + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + if (responseData != null) + { + // Capture available metadata even if the operation failed. 
+ activity + .SetResponseId(responseData.Id) + .SetPromptTokenUsage(responseData.Usage.PromptTokens) + .SetCompletionTokenUsage(responseData.Usage.CompletionTokens); + } + throw; + } + + responseContent = responseData.Choices.Select(chatChoice => this.GetChatMessage(chatChoice, responseData)).ToList(); + activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens); + } + + return responseContent; + } + + /// + /// Get streaming chat contents for the chat history provided using the specified settings. + /// + /// Throws if the specified type is not the same or fail to cast + /// The chat history to complete. + /// The AI execution settings (optional). + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Streaming list of different completion streaming string updates generated by the remote model + internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(chatHistory); + + AzureAIInferencePromptExecutionSettings chatExecutionSettings = AzureAIInferencePromptExecutionSettings.FromExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chatHistory, kernel, this.ModelId); + StringBuilder? contentBuilder = null; + + // Reset state + contentBuilder?.Clear(); + + // Stream the response. + IReadOnlyDictionary? metadata = null; + string? streamedName = null; + ChatRole? streamedRole = default; + CompletionsFinishReason finishReason = default; + + using var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.ModelId ?? 
string.Empty, ModelProvider, chatHistory, chatExecutionSettings); + StreamingResponse response; + try + { + response = await RunRequestAsync(() => this.Client.CompleteStreamingAsync(chatOptions, cancellationToken)).ConfigureAwait(false); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); + List? streamedContents = activity is not null ? [] : null; + try + { + while (true) + { + try + { + if (!await responseEnumerator.MoveNextAsync()) + { + break; + } + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + StreamingChatCompletionsUpdate update = responseEnumerator.Current; + metadata = GetResponseMetadata(update); + streamedRole ??= update.Role; + streamedName ??= update.AuthorName; + finishReason = update.FinishReason ?? default; + + AuthorRole? role = null; + if (streamedRole.HasValue) + { + role = new AuthorRole(streamedRole.Value.ToString()); + } + + StreamingChatMessageContent streamingChatMessageContent = + new(role: update.Role.HasValue ? new AuthorRole(update.Role.ToString()!) : null, content: update.ContentUpdate, innerContent: update, modelId: update.Model, metadata: metadata) + { + AuthorName = streamedName, + Role = role, + Metadata = metadata, + }; + + streamedContents?.Add(streamingChatMessageContent); + yield return streamingChatMessageContent; + } + } + finally + { + activity?.EndStreaming(streamedContents, null); + await responseEnumerator.DisposeAsync(); + } + } + + #region Private + + private const string ModelProvider = "azure-ai-inference"; + /// + /// Instance of for metrics. + /// + private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.AzureAIInference"); + + /// + /// Instance of to keep track of the number of prompt tokens used. 
+ /// + private static readonly Counter s_promptTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.azure-ai-inference.tokens.prompt", + unit: "{token}", + description: "Number of prompt tokens used"); + + /// + /// Instance of to keep track of the number of completion tokens used. + /// + private static readonly Counter s_completionTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.azure-ai-inference.tokens.completion", + unit: "{token}", + description: "Number of completion tokens used"); + + /// + /// Instance of to keep track of the total number of tokens used. + /// + private static readonly Counter s_totalTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.azure-ai-inference.tokens.total", + unit: "{token}", + description: "Number of tokens used"); + + /// + /// Single space constant. + /// + private const string SingleSpace = " "; + + /// Gets options to use for an Azure AI InferenceClient + /// Custom for HTTP requests. + /// Optional API version. + /// An instance of . + private static ChatCompletionsClientOptions GetClientOptions(HttpClient? httpClient, ChatCompletionsClientOptions.ServiceVersion? serviceVersion = null) + { + ChatCompletionsClientOptions options = serviceVersion is not null ? + new(serviceVersion.Value) : + new(); + + options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; + + options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ChatClientCore))), Azure.Core.HttpPipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientTransport(httpClient); + options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. 
+ options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout + } + + return options; + } + + /// + /// Invokes the specified request and handles exceptions. + /// + /// Type of the response. + /// Request to invoke. + /// Returns the response. + private static async Task RunRequestAsync(Func> request) + { + try + { + return await request.Invoke().ConfigureAwait(false); + } + catch (RequestFailedException e) + { + throw e.ToHttpOperationException(); + } + } + + /// + /// Checks if the maximum tokens value is valid. + /// + /// Maximum tokens value. + /// Throws if the maximum tokens value is invalid. + private static void ValidateMaxTokens(int? maxTokens) + { + if (maxTokens.HasValue && maxTokens < 1) + { + throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); + } + } + + /// + /// Creates a new instance of based on the provided settings. + /// + /// The execution settings. + /// The chat history. + /// Kernel instance. + /// Model ID. + /// Create a new instance of . + private ChatCompletionsOptions CreateChatCompletionsOptions( + AzureAIInferencePromptExecutionSettings executionSettings, + ChatHistory chatHistory, + Kernel? kernel, + string? 
modelId) + { + if (this.Logger.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chatHistory), + JsonSerializer.Serialize(executionSettings)); + } + + var options = new ChatCompletionsOptions + { + MaxTokens = executionSettings.MaxTokens, + Temperature = executionSettings.Temperature, + NucleusSamplingFactor = executionSettings.NucleusSamplingFactor, + FrequencyPenalty = executionSettings.FrequencyPenalty, + PresencePenalty = executionSettings.PresencePenalty, + Model = modelId, + Seed = executionSettings.Seed, + }; + + switch (executionSettings.ResponseFormat) + { + case ChatCompletionsResponseFormat formatObject: + // If the response format is an Azure SDK ChatCompletionsResponseFormat, just pass it along. + options.ResponseFormat = formatObject; + break; + + case string formatString: + // If the response format is a string, map the ones we know about, and ignore the rest. + switch (formatString) + { + case "json_object": + options.ResponseFormat = new ChatCompletionsResponseFormatJSON(); + break; + + case "text": + options.ResponseFormat = new ChatCompletionsResponseFormatText(); + break; + } + break; + + case JsonElement formatElement: + // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. + // Handling only string formatElement. + if (formatElement.ValueKind == JsonValueKind.String) + { + string formatString = formatElement.GetString() ?? 
""; + switch (formatString) + { + case "json_object": + options.ResponseFormat = new ChatCompletionsResponseFormatJSON(); + break; + + case "text": + options.ResponseFormat = new ChatCompletionsResponseFormatText(); + break; + } + } + break; + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + foreach (var message in chatHistory) + { + options.Messages.AddRange(GetRequestMessages(message)); + } + + return options; + } + + /// + /// Create request messages based on the chat message content. + /// + /// Chat message content. + /// A list of . + /// When the message role is not supported. + private static List GetRequestMessages(ChatMessageContent message) + { + if (message.Role == AuthorRole.System) + { + return [new ChatRequestSystemMessage(message.Content)]; + } + + if (message.Role == AuthorRole.User) + { + if (message.Items is { Count: 1 } && message.Items.FirstOrDefault() is TextContent textContent) + { + // Name removed temporarily as the Azure AI Inference service does not support it ATM. + // Issue: https://github.com/Azure/azure-sdk-for-net/issues/45415 + return [new ChatRequestUserMessage(textContent.Text) /*{ Name = message.AuthorName }*/ ]; + } + + return [new ChatRequestUserMessage(message.Items.Select(static (KernelContent item) => (ChatMessageContentItem)(item switch + { + TextContent textContent => new ChatMessageTextContentItem(textContent.Text), + ImageContent imageContent => GetImageContentItem(imageContent), + _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") + }))) + + // Name removed temporarily as the Azure AI Inference service does not support it ATM. 
+ // Issue: https://github.com/Azure/azure-sdk-for-net/issues/45415 + /*{ Name = message.AuthorName }*/]; + } + + if (message.Role == AuthorRole.Assistant) + { + // Name removed temporarily as the Azure AI Inference service does not support it ATM. + // Issue: https://github.com/Azure/azure-sdk-for-net/issues/45415 + return [new ChatRequestAssistantMessage() { Content = message.Content /* Name = message.AuthorName */ }]; + } + + throw new NotSupportedException($"Role {message.Role} is not supported."); + } + + /// + /// Create a new instance of based on the provided + /// + /// Target . + /// new instance of + /// When the does not have Data or Uri. + private static ChatMessageImageContentItem GetImageContentItem(ImageContent imageContent) + { + if (imageContent.Data is { IsEmpty: false } data) + { + return new ChatMessageImageContentItem(BinaryData.FromBytes(data), imageContent.MimeType); + } + + if (imageContent.Uri is not null) + { + return new ChatMessageImageContentItem(imageContent.Uri); + } + + throw new ArgumentException($"{nameof(ImageContent)} must have either Data or a Uri."); + } + + /// + /// Captures usage details, including token information. + /// + /// Instance of with usage details. + private void LogUsage(CompletionsUsage usage) + { + if (usage is null) + { + this.Logger.LogDebug("Token usage information unavailable."); + return; + } + + if (this.Logger.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation( + "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.", + usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens); + } + + s_promptTokensCounter.Add(usage.PromptTokens); + s_completionTokensCounter.Add(usage.CompletionTokens); + s_totalTokensCounter.Add(usage.TotalTokens); + } + + /// + /// Create a new based on the provided and . + /// + /// The object representing the selected choice. + /// The object containing the response data. + /// A new object. 
+ private ChatMessageContent GetChatMessage(ChatChoice chatChoice, ChatCompletions responseData) + { + var message = new ChatMessageContent( + new AuthorRole(chatChoice.Message.Role.ToString()), + chatChoice.Message.Content, + responseData.Model, + innerContent: responseData, + metadata: GetChatChoiceMetadata(responseData, chatChoice) + ); + return message; + } + + /// + /// Create the metadata dictionary based on the provided and . + /// + /// The object containing the response data. + /// The object representing the selected choice. + /// A new dictionary with metadata. + private static Dictionary GetChatChoiceMetadata(ChatCompletions completions, ChatChoice chatChoice) + { + return new Dictionary(5) + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.Created), completions.Created }, + { nameof(completions.Usage), completions.Usage }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. + { nameof(chatChoice.FinishReason), chatChoice.FinishReason?.ToString() }, + { nameof(chatChoice.Index), chatChoice.Index }, + }; + } + + /// + /// Create the metadata dictionary based on the provided . + /// + /// The object containing the response data. + /// A new dictionary with metadata. + private static Dictionary GetResponseMetadata(StreamingChatCompletionsUpdate completions) + { + return new Dictionary(3) + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.Created), completions.Created }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. 
+ { nameof(completions.FinishReason), completions.FinishReason?.ToString() }, + }; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/RequestFailedExceptionExtensions.cs similarity index 95% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs rename to dotnet/src/Connectors/Connectors.AzureAIInference/Core/RequestFailedExceptionExtensions.cs index 51f99aa1c0cb..37d5890da116 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/RequestFailedExceptionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Core/RequestFailedExceptionExtensions.cs @@ -3,7 +3,7 @@ using System.Net; using Azure; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureAIInference; /// /// Provides extension methods for the class. diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceKernelBuilderExtensions.cs new file mode 100644 index 000000000000..c1760d4ac316 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceKernelBuilderExtensions.cs @@ -0,0 +1,86 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Azure.AI.Inference; +using Azure.Core; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure AI Inference connectors. +/// +public static class AzureAIInferenceKernelBuilderExtensions +{ + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Target Model Id for endpoints supporting more than one model + /// API Key + /// Endpoint / Target URI + /// Custom for HTTP requests. 
+ /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddAzureAIInferenceChatCompletion( + this IKernelBuilder builder, + string? modelId = null, + string? apiKey = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddAzureAIInferenceChatCompletion(modelId, apiKey, endpoint, httpClient, serviceId); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Target Model Id for endpoints supporting more than one model + /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Endpoint / Target URI + /// Custom for HTTP requests. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddAzureAIInferenceChatCompletion( + this IKernelBuilder builder, + string? modelId, + TokenCredential credential, + Uri? endpoint = null, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddAzureAIInferenceChatCompletion(modelId, credential, endpoint, httpClient, serviceId); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure AI Inference model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddAzureAIInferenceChatCompletion( + this IKernelBuilder builder, + string modelId, + ChatCompletionsClient? chatClient = null, + string? 
serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddAzureAIInferenceChatCompletion(modelId, chatClient, serviceId); + + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceServiceCollectionExtensions.cs new file mode 100644 index 000000000000..b508b38537d3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Extensions/AzureAIInferenceServiceCollectionExtensions.cs @@ -0,0 +1,106 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Azure.AI.Inference; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure AI Inference connectors. +/// +public static class AzureAIInferenceServiceCollectionExtensions +{ + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Target Model Id for endpoints supporting more than one model + /// API Key + /// Endpoint / Target URI + /// Custom for HTTP requests. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddAzureAIInferenceChatCompletion( + this IServiceCollection services, + string? modelId = null, + string? apiKey = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + + AzureAIInferenceChatCompletionService Factory(IServiceProvider serviceProvider, object? 
_) => + new(modelId, + apiKey, + endpoint, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Target Model Id for endpoints supporting more than one model + /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Endpoint / Target URI + /// Custom for HTTP requests. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddAzureAIInferenceChatCompletion( + this IServiceCollection services, + string? modelId, + TokenCredential credential, + Uri? endpoint = null, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + + AzureAIInferenceChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + credential, + endpoint, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure AI Inference model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddAzureAIInferenceChatCompletion(this IServiceCollection services, + string modelId, + ChatCompletionsClient? chatClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + + AzureAIInferenceChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, chatClient ?? 
serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Services/AzureAIInferenceChatCompletionService.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Services/AzureAIInferenceChatCompletionService.cs new file mode 100644 index 000000000000..0b55ac3cd696 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Services/AzureAIInferenceChatCompletionService.cs @@ -0,0 +1,96 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.Inference; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference.Core; + +namespace Microsoft.SemanticKernel.Connectors.AzureAIInference; + +/// +/// Chat completion service for Azure AI Inference. +/// +public sealed class AzureAIInferenceChatCompletionService : IChatCompletionService +{ + private readonly ChatClientCore _core; + + /// + /// Initializes a new instance of the class. + /// + /// Target Model Id for endpoints supporting more than one model + /// API Key + /// Endpoint / Target URI + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureAIInferenceChatCompletionService( + string? modelId = null, + string? apiKey = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new( + modelId, + apiKey, + endpoint, + httpClient, + loggerFactory?.CreateLogger(typeof(AzureAIInferenceChatCompletionService))); + } + + /// + /// Initializes a new instance of the class. + /// + /// Target Model Id for endpoints supporting more than one model + /// Token credential, e.g. 
DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Endpoint / Target URI + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureAIInferenceChatCompletionService( + string? modelId, + TokenCredential credential, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._core = new( + modelId, + credential, + endpoint, + httpClient, + loggerFactory?.CreateLogger(typeof(AzureAIInferenceChatCompletionService))); + } + + /// + /// Initializes a new instance of the class providing your own ChatCompletionsClient instance. + /// + /// Target Model Id for endpoints supporting more than one model + /// Breaking glass for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureAIInferenceChatCompletionService( + string? modelId, + ChatCompletionsClient chatClient, + ILoggerFactory? loggerFactory = null) + { + this._core = new( + modelId, + chatClient, + loggerFactory?.CreateLogger(typeof(AzureAIInferenceChatCompletionService))); + } + + /// + public IReadOnlyDictionary Attributes => this._core.Attributes; + + /// + public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) + => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + + /// + public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) + => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs new file mode 100644 index 000000000000..db502f3ebf4d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAIInference/Settings/AzureAIInferencePromptExecutionSettings.cs @@ -0,0 +1,281 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.AI.Inference; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureAIInference; + +/// +/// Chat completion prompt execution settings. +/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class AzureAIInferencePromptExecutionSettings : PromptExecutionSettings +{ + /// + /// Initializes a new instance of the class. + /// + public AzureAIInferencePromptExecutionSettings() + { + this.ExtensionData = new Dictionary(); + } + + /// + /// Allowed values: "error" | "drop" | "pass-through" + /// + [JsonPropertyName("extra_parameters")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ExtraParameters + { + get => this._extraParameters; + set + { + this.ThrowIfFrozen(); + this._extraParameters = value; + } + } + + /// + /// A value that influences the probability of generated tokens appearing based on their cumulative + /// frequency in generated text. + /// Positive values will make tokens less likely to appear as their frequency increases and + /// decrease the likelihood of the model repeating the same statements verbatim. + /// Supported range is [-2, 2]. 
+ /// + [JsonPropertyName("frequency_penalty")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? FrequencyPenalty + { + get => this._frequencyPenalty; + set + { + this.ThrowIfFrozen(); + this._frequencyPenalty = value; + } + } + + /// + /// A value that influences the probability of generated tokens appearing based on their existing + /// presence in generated text. + /// Positive values will make tokens less likely to appear when they already exist and increase the + /// model's likelihood to output new topics. + /// Supported range is [-2, 2]. + /// + [JsonPropertyName("presence_penalty")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? PresencePenalty + { + get => this._presencePenalty; + set + { + this.ThrowIfFrozen(); + this._presencePenalty = value; + } + } + + /// + /// The sampling temperature to use that controls the apparent creativity of generated completions. + /// Higher values will make output more random while lower values will make results more focused + /// and deterministic. + /// It is not recommended to modify temperature and top_p for the same completions request as the + /// interaction of these two settings is difficult to predict. + /// Supported range is [0, 1]. + /// + [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature + { + get => this._temperature; + set + { + this.ThrowIfFrozen(); + this._temperature = value; + } + } + + /// + /// An alternative to sampling with temperature called nucleus sampling. This value causes the + /// model to consider the results of tokens with the provided probability mass. As an example, a + /// value of 0.15 will cause only the tokens comprising the top 15% of probability mass to be + /// considered. + /// It is not recommended to modify temperature and top_p for the same completions request as the + /// interaction of these two settings is difficult to predict. 
+ /// Supported range is [0, 1]. + /// + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? NucleusSamplingFactor + { + get => this._nucleusSamplingFactor; + set + { + this.ThrowIfFrozen(); + this._nucleusSamplingFactor = value; + } + } + + /// The maximum number of tokens to generate. + [JsonPropertyName("max_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? MaxTokens + { + get => this._maxTokens; + set + { + this.ThrowIfFrozen(); + this._maxTokens = value; + } + } + + /// + /// The format that the model must output. Use this to enable JSON mode instead of the default text mode. + /// Note that to enable JSON mode, some AI models may also require you to instruct the model to produce JSON + /// via a system or user message. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + [JsonPropertyName("response_format")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public object? ResponseFormat + { + get => this._responseFormat; + set + { + this.ThrowIfFrozen(); + this._responseFormat = value; + } + } + + /// A collection of textual sequences that will end completions generation. + [JsonPropertyName("stop")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IList StopSequences + { + get => this._stopSequences; + set + { + this.ThrowIfFrozen(); + this._stopSequences = value; + } + } + + /// + /// The available tool definitions that the chat completions request can use, including caller-defined functions. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. 
+ /// The available derived classes include . + /// + [JsonPropertyName("tools")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IList Tools + { + get => this._tools; + set + { + this.ThrowIfFrozen(); + this._tools = value; + } + } + + /// + /// If specified, the system will make a best effort to sample deterministically such that repeated requests with the + /// same seed and parameters should return the same result. Determinism is not guaranteed. + /// + [JsonPropertyName("seed")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public long? Seed + { + get => this._seed; + set + { + this.ThrowIfFrozen(); + this._seed = value; + } + } + + /// + public override void Freeze() + { + if (this.IsFrozen) + { + return; + } + + base.Freeze(); + + if (this._stopSequences is not null) + { + this._stopSequences = new ReadOnlyCollection(this._stopSequences); + } + + if (this._tools is not null) + { + this._tools = new ReadOnlyCollection(this._tools); + } + } + + /// + public override PromptExecutionSettings Clone() + { + return new AzureAIInferencePromptExecutionSettings() + { + ExtraParameters = this.ExtraParameters, + FrequencyPenalty = this.FrequencyPenalty, + PresencePenalty = this.PresencePenalty, + Temperature = this.Temperature, + NucleusSamplingFactor = this.NucleusSamplingFactor, + MaxTokens = this.MaxTokens, + ResponseFormat = this.ResponseFormat, + StopSequences = new List(this.StopSequences), + Tools = new List(this.Tools), + Seed = this.Seed, + ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, + }; + } + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// An instance of + public static AzureAIInferencePromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? 
executionSettings) + { + if (executionSettings is null) + { + return new AzureAIInferencePromptExecutionSettings(); + } + + if (executionSettings is AzureAIInferencePromptExecutionSettings settings) + { + return settings; + } + + var json = JsonSerializer.Serialize(executionSettings); + + var aiInferenceSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + if (aiInferenceSettings is not null) + { + return aiInferenceSettings; + } + + throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(AzureAIInferencePromptExecutionSettings)}", nameof(executionSettings)); + } + + #region private ================================================================================ + + private string? _extraParameters; + private float? _frequencyPenalty; + private float? _presencePenalty; + private float? _temperature; + private float? _nucleusSamplingFactor; + private int? _maxTokens; + private object? _responseFormat; + private IList _stopSequences = []; + private IList _tools = []; + private long? _seed; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..28b9ee74e0ea --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchGenericDataModelMapperTests.cs @@ -0,0 +1,286 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Connectors.AzureAISearch; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.Connectors.AzureAISearch.UnitTests; + +/// +/// Tests for the class. 
+/// +public class AzureAISearchGenericDataModelMapperTests +{ + private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + private static readonly float[] s_vector1 = new float[] { 1.0f, 2.0f, 3.0f }; + private static readonly float[] s_vector2 = new float[] { 4.0f, 5.0f, 6.0f }; + private static readonly string[] s_taglist = new string[] { "tag1", "tag2" }; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringDataProp"] = "string", + 
["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["TagListDataProp"] = s_taglist, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_vector1), + ["NullableFloatVector"] = new ReadOnlyMemory(s_vector2), + }, + }; + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", (string?)storageModel["Key"]); + Assert.Equal("string", (string?)storageModel["StringDataProp"]); + Assert.Equal(1, (int?)storageModel["IntDataProp"]); + Assert.Equal(2, (int?)storageModel["NullableIntDataProp"]); + Assert.Equal(3L, (long?)storageModel["LongDataProp"]); + Assert.Equal(4L, (long?)storageModel["NullableLongDataProp"]); + Assert.Equal(5.0f, (float?)storageModel["FloatDataProp"]); + Assert.Equal(6.0f, (float?)storageModel["NullableFloatDataProp"]); + Assert.Equal(7.0, (double?)storageModel["DoubleDataProp"]); + Assert.Equal(8.0, (double?)storageModel["NullableDoubleDataProp"]); + Assert.Equal(true, (bool?)storageModel["BoolDataProp"]); + Assert.Equal(false, (bool?)storageModel["NullableBoolDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["NullableDateTimeOffsetDataProp"]); + Assert.Equal(s_taglist, storageModel["TagListDataProp"]!.AsArray().Select(x => (string)x!).ToArray()); + Assert.Equal(s_vector1, storageModel["FloatVector"]!.AsArray().Select(x => (float)x!).ToArray()); + Assert.Equal(s_vector2, 
storageModel["NullableFloatVector"]!.AsArray().Select(x => (float)x!).ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + }, + Vectors = + { + ["NullableFloatVector"] = null, + }, + }; + + var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Null(storageModel["StringDataProp"]); + Assert.Null(storageModel["NullableIntDataProp"]); + Assert.Null(storageModel["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var storageModel = new JsonObject(); + storageModel["Key"] = "key"; + storageModel["StringDataProp"] = "string"; + storageModel["IntDataProp"] = 1; + storageModel["NullableIntDataProp"] = 2; + storageModel["LongDataProp"] = 3L; + storageModel["NullableLongDataProp"] = 4L; + storageModel["FloatDataProp"] = 5.0f; + storageModel["NullableFloatDataProp"] = 6.0f; + storageModel["DoubleDataProp"] = 7.0; + storageModel["NullableDoubleDataProp"] = 8.0; + storageModel["BoolDataProp"] = true; + storageModel["NullableBoolDataProp"] = false; + storageModel["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero); + storageModel["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 
0, TimeSpan.Zero); + storageModel["TagListDataProp"] = new JsonArray { "tag1", "tag2" }; + storageModel["FloatVector"] = new JsonArray { 1.0f, 2.0f, 3.0f }; + storageModel["NullableFloatVector"] = new JsonArray { 4.0f, 5.0f, 6.0f }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Equal("string", dataModel.Data["StringDataProp"]); + Assert.Equal(1, dataModel.Data["IntDataProp"]); + Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); + Assert.Equal(3L, dataModel.Data["LongDataProp"]); + Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); + Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); + Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); + Assert.Equal(true, dataModel.Data["BoolDataProp"]); + Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); + Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); + Assert.Equal(s_vector1, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal(s_vector2, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", 
typeof(ReadOnlyMemory?)), + }, + }; + + var storageModel = new JsonObject(); + storageModel["Key"] = "key"; + storageModel["StringDataProp"] = null; + storageModel["NullableIntDataProp"] = null; + storageModel["NullableFloatVector"] = null; + + var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Null(dataModel.Data["StringDataProp"]); + Assert.Null(dataModel.Data["NullableIntDataProp"]); + Assert.Null(dataModel.Vectors["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelThrowsForMissingKey() + { + // Arrange + var sut = new AzureAISearchGenericDataModelMapper(s_vectorStoreRecordDefinition); + var storageModel = new JsonObject(); + + // Act + var exception = Assert.Throws(() => sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true })); + + // Assert + Assert.Equal("The key property 'Key' is missing from the record retrieved from storage.", exception.Message); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key"); + var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", (string?)storageModel["Key"]); + Assert.False(storageModel.ContainsKey("StringDataProp")); + Assert.False(storageModel.ContainsKey("FloatVector")); + } + + 
[Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var storageModel = new JsonObject(); + storageModel["Key"] = "key"; + + var sut = new AzureAISearchGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.False(dataModel.Data.ContainsKey("StringDataProp")); + Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs index c303613248f0..51771cbcbc23 100644 --- a/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureAISearch.UnitTests/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -206,7 +206,7 @@ public async Task CanGetRecordWithoutVectorsAsync(bool useDefinition, bool useCu // Arrange. var storageObject = JsonSerializer.SerializeToNode(CreateModel(TestRecordKey1, false))!.AsObject(); - var expectedSelectFields = useCustomJsonSerializerOptions ? new[] { "storage_data1", "data2", "key" } : new[] { "storage_data1", "Data2", "Key" }; + var expectedSelectFields = useCustomJsonSerializerOptions ? 
new[] { "key", "storage_data1", "data2" } : new[] { "Key", "storage_data1", "Data2" }; this._searchClientMock.Setup( x => x.GetDocumentAsync( TestRecordKey1, diff --git a/dotnet/src/Experimental/Agents.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/.editorconfig similarity index 100% rename from dotnet/src/Experimental/Agents.UnitTests/.editorconfig rename to dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/.editorconfig diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..e2b02c35a41f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBGenericDataModelMapperTests.cs @@ -0,0 +1,310 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureCosmosDBMongoDBGenericDataModelMapperTests +{ + private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), + new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), + new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), + new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)), + }, + }; + + private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; + private static readonly double[] s_doubleVector = [1.0f, 2.0f, 3.0f]; + private static readonly List s_taglist = ["tag1", "tag2"]; + + [Fact] + 
public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DecimalDataProp"] = 9.0m, + ["NullableDecimalDataProp"] = 10.0m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["TagListDataProp"] = s_taglist, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), + }, + }; + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel["_id"]); + Assert.Equal(true, (bool?)storageModel["BoolDataProp"]); + Assert.Equal(false, (bool?)storageModel["NullableBoolDataProp"]); + Assert.Equal("string", (string?)storageModel["StringDataProp"]); + Assert.Equal(1, (int?)storageModel["IntDataProp"]); + Assert.Equal(2, (int?)storageModel["NullableIntDataProp"]); + Assert.Equal(3L, (long?)storageModel["LongDataProp"]); + Assert.Equal(4L, (long?)storageModel["NullableLongDataProp"]); + Assert.Equal(5.0f, (float?)storageModel["FloatDataProp"].AsDouble); + Assert.Equal(6.0f, (float?)storageModel["NullableFloatDataProp"].AsNullableDouble); + Assert.Equal(7.0, (double?)storageModel["DoubleDataProp"]); + Assert.Equal(8.0, (double?)storageModel["NullableDoubleDataProp"]); + Assert.Equal(9.0m, 
(decimal?)storageModel["DecimalDataProp"]); + Assert.Equal(10.0m, (decimal?)storageModel["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), storageModel["DateTimeDataProp"].ToUniversalTime()); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), storageModel["NullableDateTimeDataProp"].ToUniversalTime()); + Assert.Equal(s_taglist, storageModel["TagListDataProp"]!.AsBsonArray.Select(x => (string)x!).ToArray()); + Assert.Equal(s_floatVector, storageModel["FloatVector"]!.AsBsonArray.Select(x => (float)x.AsDouble!).ToArray()); + Assert.Equal(s_floatVector, storageModel["NullableFloatVector"]!.AsBsonArray.Select(x => (float)x.AsNullableDouble!).ToArray()); + Assert.Equal(s_doubleVector, storageModel["DoubleVector"]!.AsBsonArray.Select(x => (double)x!).ToArray()); + Assert.Equal(s_doubleVector, storageModel["NullableDoubleVector"]!.AsBsonArray.Select(x => (double)x!).ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + }, + Vectors = + { + ["NullableFloatVector"] = null, + }, + }; + + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(BsonNull.Value, storageModel["StringDataProp"]); + Assert.Equal(BsonNull.Value, storageModel["NullableIntDataProp"]); + 
Assert.Empty(storageModel["NullableFloatVector"].AsBsonArray); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var storageModel = new BsonDocument + { + ["_id"] = "key", + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DecimalDataProp"] = 9.0m, + ["NullableDecimalDataProp"] = 10.0m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), + ["TagListDataProp"] = BsonArray.Create(s_taglist), + ["FloatVector"] = BsonArray.Create(s_floatVector), + ["NullableFloatVector"] = BsonArray.Create(s_floatVector), + ["DoubleVector"] = BsonArray.Create(s_doubleVector), + ["NullableDoubleVector"] = BsonArray.Create(s_doubleVector) + }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Equal(true, dataModel.Data["BoolDataProp"]); + Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); + Assert.Equal("string", dataModel.Data["StringDataProp"]); + Assert.Equal(1, dataModel.Data["IntDataProp"]); + Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); + Assert.Equal(3L, dataModel.Data["LongDataProp"]); + Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); + Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); + Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); + 
Assert.Equal(9.0m, dataModel.Data["DecimalDataProp"]); + Assert.Equal(10.0m, dataModel.Data["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel.Data["DateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0).ToUniversalTime(), dataModel.Data["NullableDateTimeDataProp"]); + Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["NullableDoubleVector"]!)!.ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var storageModel = new BsonDocument + { + ["_id"] = "key", + ["StringDataProp"] = BsonNull.Value, + ["NullableIntDataProp"] = BsonNull.Value, + ["NullableFloatVector"] = BsonNull.Value + }; + + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Null(dataModel.Data["StringDataProp"]); + Assert.Null(dataModel.Data["NullableIntDataProp"]); + Assert.Null(dataModel.Vectors["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelThrowsForMissingKey() + { 
+ // Arrange + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(s_vectorStoreRecordDefinition); + var storageModel = new BsonDocument(); + + // Act & Assert + var exception = Assert.Throws( + () => sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true })); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key"); + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", (string?)storageModel["_id"]); + Assert.False(storageModel.Contains("StringDataProp")); + Assert.False(storageModel.Contains("FloatVector")); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var storageModel = new BsonDocument + { + ["_id"] = "key" + }; + + var sut = new AzureCosmosDBMongoDBGenericDataModelMapper(vectorStoreRecordDefinition); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.False(dataModel.Data.ContainsKey("StringDataProp")); + 
Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs new file mode 100644 index 000000000000..7c8a3208446d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBHotelModel.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson.Serialization.Attributes; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +public class AzureCosmosDBMongoDBHotelModel(string hotelId) +{ + /// The key of the record. + [VectorStoreRecordKey] + public string HotelId { get; init; } = hotelId; + + /// A string metadata field. + [VectorStoreRecordData] + public string? HotelName { get; set; } + + /// An int metadata field. + [VectorStoreRecordData] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. + [BsonElement("parking_is_included")] + [VectorStoreRecordData] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. + [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData] + public string? Description { get; set; } + + /// A vector field. + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.IvfFlat, DistanceFunction: DistanceFunction.CosineDistance)] + public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..5bdac2ee460b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBKernelBuilderExtensionsTests.cs @@ -0,0 +1,55 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; +using MongoDB.Driver; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBMongoDBKernelBuilderExtensionsTests +{ + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(Mock.Of()); + + // Act + this._kernelBuilder.AddAzureCosmosDBMongoDBVectorStore(); + + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } + + [Fact] + public void AddVectorStoreWithConnectionStringRegistersClass() + { + // Act + this._kernelBuilder.AddAzureCosmosDBMongoDBVectorStore("mongodb://localhost:27017", "mydb"); + + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + + var database = (IMongoDatabase)vectorStore.GetType().GetField("_mongoDatabase", BindingFlags.NonPublic | 
BindingFlags.Instance)!.GetValue(vectorStore)!; + Assert.Equal(HttpHeaderConstant.Values.UserAgent, database.Client.Settings.ApplicationName); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..2a365d44e650 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBServiceCollectionExtensionsTests.cs @@ -0,0 +1,55 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; +using MongoDB.Driver; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureCosmosDBMongoDBServiceCollectionExtensionsTests +{ + private readonly IServiceCollection _serviceCollection = new ServiceCollection(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._serviceCollection.AddSingleton(Mock.Of()); + + // Act + this._serviceCollection.AddAzureCosmosDBMongoDBVectorStore(); + + var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } + + [Fact] + public void AddVectorStoreWithConnectionStringRegistersClass() + { + // Act + this._serviceCollection.AddAzureCosmosDBMongoDBVectorStore("mongodb://localhost:27017", "mydb"); + + var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + + var database = (IMongoDatabase)vectorStore.GetType().GetField("_mongoDatabase", BindingFlags.NonPublic | BindingFlags.Instance)!.GetValue(vectorStore)!; + Assert.Equal(HttpHeaderConstant.Values.UserAgent, database.Client.Settings.ApplicationName); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..698a4caa443c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -0,0 +1,651 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using MongoDB.Bson.Serialization; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionTests +{ + private readonly Mock _mockMongoDatabase = new(); + private readonly Mock> _mockMongoCollection = new(); + + public AzureCosmosDBMongoDBVectorStoreRecordCollectionTests() + { + this._mockMongoDatabase + .Setup(l => l.GetCollection(It.IsAny(), It.IsAny())) + .Returns(this._mockMongoCollection.Object); + } + + [Fact] + public void ConstructorForModelWithoutKeyThrowsException() + { + // Act & Assert + var exception = Assert.Throws(() => new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, "collection")); + Assert.Contains("No key property found", exception.Message); + } + + [Fact] + public void ConstructorWithDeclarativeModelInitializesCollection() + { + // Act & Assert + var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + Assert.NotNull(collection); + } + + [Fact] + public void ConstructorWithImperativeModelInitializesCollection() + { + // Arrange + var definition = new VectorStoreRecordDefinition + { + Properties = [new VectorStoreRecordKeyProperty("Id", typeof(string))] + }; + + // Act + var collection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + new() { VectorStoreRecordDefinition = definition }); + + // Assert + Assert.NotNull(collection); + } + + [Theory] + [MemberData(nameof(CollectionExistsData))] + public async Task 
CollectionExistsReturnsValidResultAsync(List collections, string collectionName, bool expectedResult) + { + // Arrange + var mockCursor = new Mock>(); + + mockCursor + .Setup(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true); + + mockCursor + .Setup(l => l.Current) + .Returns(collections); + + this._mockMongoDatabase + .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) + .ReturnsAsync(mockCursor.Object); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + collectionName); + + // Act + var actualResult = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(expectedResult, actualResult); + } + + [Theory] + [InlineData(true, 0)] + [InlineData(false, 1)] + public async Task CreateCollectionInvokesValidMethodsAsync(bool indexExists, int actualIndexCreations) + { + // Arrange + const string CollectionName = "collection"; + + List indexes = indexExists ? [new BsonDocument { ["name"] = "DescriptionEmbedding_" }] : []; + + var mockIndexCursor = new Mock>(); + mockIndexCursor + .SetupSequence(l => l.MoveNext(It.IsAny())) + .Returns(true) + .Returns(false); + + mockIndexCursor + .Setup(l => l.Current) + .Returns(indexes); + + var mockMongoIndexManager = new Mock>(); + + mockMongoIndexManager + .Setup(l => l.ListAsync(It.IsAny())) + .ReturnsAsync(mockIndexCursor.Object); + + this._mockMongoCollection + .Setup(l => l.Indexes) + .Returns(mockMongoIndexManager.Object); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(this._mockMongoDatabase.Object, CollectionName); + + // Act + await sut.CreateCollectionAsync(); + + // Assert + this._mockMongoDatabase.Verify(l => l.CreateCollectionAsync( + CollectionName, + It.IsAny(), + It.IsAny()), Times.Once()); + + this._mockMongoDatabase.Verify(l => l.RunCommandAsync( + It.Is>(command => + command.Document["createIndexes"] == CollectionName && + command.Document["indexes"].GetType() == typeof(BsonArray) && + 
((BsonArray)command.Document["indexes"]).Count == 1), + It.IsAny(), + It.IsAny()), Times.Exactly(actualIndexCreations)); + } + + [Theory] + [MemberData(nameof(CreateCollectionIfNotExistsData))] + public async Task CreateCollectionIfNotExistsInvokesValidMethodsAsync(List collections, int actualCollectionCreations) + { + // Arrange + const string CollectionName = "collection"; + + var mockCursor = new Mock>(); + mockCursor + .Setup(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true); + + mockCursor + .Setup(l => l.Current) + .Returns(collections); + + this._mockMongoDatabase + .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) + .ReturnsAsync(mockCursor.Object); + + var mockIndexCursor = new Mock>(); + mockIndexCursor + .SetupSequence(l => l.MoveNext(It.IsAny())) + .Returns(true) + .Returns(false); + + mockIndexCursor + .Setup(l => l.Current) + .Returns([]); + + var mockMongoIndexManager = new Mock>(); + + mockMongoIndexManager + .Setup(l => l.ListAsync(It.IsAny())) + .ReturnsAsync(mockIndexCursor.Object); + + this._mockMongoCollection + .Setup(l => l.Indexes) + .Returns(mockMongoIndexManager.Object); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + CollectionName); + + // Act + await sut.CreateCollectionIfNotExistsAsync(); + + // Assert + this._mockMongoDatabase.Verify(l => l.CreateCollectionAsync( + CollectionName, + It.IsAny(), + It.IsAny()), Times.Exactly(actualCollectionCreations)); + } + + [Fact] + public async Task DeleteInvokesValidMethodsAsync() + { + // Arrange + const string RecordKey = "key"; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + var serializerRegistry = BsonSerializer.SerializerRegistry; + var documentSerializer = serializerRegistry.GetSerializer(); + var expectedDefinition = Builders.Filter.Eq(document => document["_id"], RecordKey); + + // Act + await sut.DeleteAsync(RecordKey); + + // Assert + 
this._mockMongoCollection.Verify(l => l.DeleteOneAsync( + It.Is>(definition => + definition.Render(documentSerializer, serializerRegistry) == + expectedDefinition.Render(documentSerializer, serializerRegistry)), + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task DeleteBatchInvokesValidMethodsAsync() + { + // Arrange + List recordKeys = ["key1", "key2"]; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + var serializerRegistry = BsonSerializer.SerializerRegistry; + var documentSerializer = serializerRegistry.GetSerializer(); + var expectedDefinition = Builders.Filter.In(document => document["_id"].AsString, recordKeys); + + // Act + await sut.DeleteBatchAsync(recordKeys); + + // Assert + this._mockMongoCollection.Verify(l => l.DeleteManyAsync( + It.Is>(definition => + definition.Render(documentSerializer, serializerRegistry) == + expectedDefinition.Render(documentSerializer, serializerRegistry)), + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task DeleteCollectionInvokesValidMethodsAsync() + { + // Arrange + const string CollectionName = "collection"; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + CollectionName); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + this._mockMongoDatabase.Verify(l => l.DropCollectionAsync( + It.Is(name => name == CollectionName), + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task GetReturnsValidRecordAsync() + { + // Arrange + const string RecordKey = "key"; + + var document = new BsonDocument { ["_id"] = RecordKey, ["HotelName"] = "Test Name" }; + + var mockCursor = new Mock>(); + mockCursor + .Setup(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true); + + mockCursor + .Setup(l => l.Current) + .Returns([document]); + + this._mockMongoCollection + .Setup(l => l.FindAsync( + It.IsAny>(), + It.IsAny>(), + It.IsAny())) + .ReturnsAsync(mockCursor.Object); 
+ + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + // Act + var result = await sut.GetAsync(RecordKey); + + // Assert + Assert.NotNull(result); + Assert.Equal(RecordKey, result.HotelId); + Assert.Equal("Test Name", result.HotelName); + } + + [Fact] + public async Task GetBatchReturnsValidRecordAsync() + { + // Arrange + var document1 = new BsonDocument { ["_id"] = "key1", ["HotelName"] = "Test Name 1" }; + var document2 = new BsonDocument { ["_id"] = "key2", ["HotelName"] = "Test Name 2" }; + var document3 = new BsonDocument { ["_id"] = "key3", ["HotelName"] = "Test Name 3" }; + + var mockCursor = new Mock>(); + mockCursor + .SetupSequence(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true) + .ReturnsAsync(false); + + mockCursor + .Setup(l => l.Current) + .Returns([document1, document2, document3]); + + this._mockMongoCollection + .Setup(l => l.FindAsync( + It.IsAny>(), + It.IsAny>(), + It.IsAny())) + .ReturnsAsync(mockCursor.Object); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + // Act + var results = await sut.GetBatchAsync(["key1", "key2", "key3"]).ToListAsync(); + + // Assert + Assert.NotNull(results[0]); + Assert.Equal("key1", results[0].HotelId); + Assert.Equal("Test Name 1", results[0].HotelName); + + Assert.NotNull(results[1]); + Assert.Equal("key2", results[1].HotelId); + Assert.Equal("Test Name 2", results[1].HotelName); + + Assert.NotNull(results[2]); + Assert.Equal("key3", results[2].HotelId); + Assert.Equal("Test Name 3", results[2].HotelName); + } + + [Fact] + public async Task UpsertReturnsRecordKeyAsync() + { + // Arrange + var hotel = new AzureCosmosDBMongoDBHotelModel("key") { HotelName = "Test Name" }; + + var serializerRegistry = BsonSerializer.SerializerRegistry; + var documentSerializer = serializerRegistry.GetSerializer(); + var expectedDefinition = Builders.Filter.Eq(document => 
document["_id"], "key"); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal("key", result); + + this._mockMongoCollection.Verify(l => l.ReplaceOneAsync( + It.Is>(definition => + definition.Render(documentSerializer, serializerRegistry) == + expectedDefinition.Render(documentSerializer, serializerRegistry)), + It.Is(document => + document["_id"] == "key" && + document["HotelName"] == "Test Name"), + It.IsAny(), + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task UpsertBatchReturnsRecordKeysAsync() + { + // Arrange + var hotel1 = new AzureCosmosDBMongoDBHotelModel("key1") { HotelName = "Test Name 1" }; + var hotel2 = new AzureCosmosDBMongoDBHotelModel("key2") { HotelName = "Test Name 2" }; + var hotel3 = new AzureCosmosDBMongoDBHotelModel("key3") { HotelName = "Test Name 3" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection"); + + // Act + var results = await sut.UpsertBatchAsync([hotel1, hotel2, hotel3]).ToListAsync(); + + // Assert + Assert.NotNull(results); + Assert.Equal(3, results.Count); + + Assert.Equal("key1", results[0]); + Assert.Equal("key2", results[1]); + Assert.Equal("key3", results[2]); + } + + [Fact] + public async Task UpsertWithModelWorksCorrectlyAsync() + { + var definition = new VectorStoreRecordDefinition + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)) + } + }; + + await this.TestUpsertWithModelAsync( + dataModel: new TestModel { Id = "key", HotelName = "Test Name" }, + expectedPropertyName: "HotelName", + definition: definition); + } + + [Fact] + public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() + { + await this.TestUpsertWithModelAsync( + dataModel: new VectorStoreTestModel { Id = "key", 
HotelName = "Test Name" }, + expectedPropertyName: "HotelName"); + } + + [Fact] + public async Task UpsertWithBsonModelWorksCorrectlyAsync() + { + var definition = new VectorStoreRecordDefinition + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)) + } + }; + + await this.TestUpsertWithModelAsync( + dataModel: new BsonTestModel { Id = "key", HotelName = "Test Name" }, + expectedPropertyName: "hotel_name", + definition: definition); + } + + [Fact] + public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() + { + await this.TestUpsertWithModelAsync( + dataModel: new BsonVectorStoreTestModel { Id = "key", HotelName = "Test Name" }, + expectedPropertyName: "hotel_name"); + } + + [Fact] + public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() + { + await this.TestUpsertWithModelAsync( + dataModel: new BsonVectorStoreWithNameTestModel { Id = "key", HotelName = "Test Name" }, + expectedPropertyName: "bson_hotel_name"); + } + + [Fact] + public async Task UpsertWithCustomMapperWorksCorrectlyAsync() + { + // Arrange + var hotel = new AzureCosmosDBMongoDBHotelModel("key") { HotelName = "Test Name" }; + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) + .Returns(new BsonDocument { ["_id"] = "key", ["my_name"] = "Test Name" }); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + new() { BsonDocumentCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal("key", result); + + this._mockMongoCollection.Verify(l => l.ReplaceOneAsync( + It.IsAny>(), + It.Is(document => + document["_id"] == "key" && + document["my_name"] == "Test Name"), + It.IsAny(), + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task GetWithCustomMapperWorksCorrectlyAsync() + { + // 
Arrange + const string RecordKey = "key"; + + var document = new BsonDocument { ["_id"] = RecordKey, ["my_name"] = "Test Name" }; + + var mockCursor = new Mock>(); + mockCursor + .Setup(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true); + + mockCursor + .Setup(l => l.Current) + .Returns([document]); + + this._mockMongoCollection + .Setup(l => l.FindAsync( + It.IsAny>(), + It.IsAny>(), + It.IsAny())) + .ReturnsAsync(mockCursor.Object); + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) + .Returns(new AzureCosmosDBMongoDBHotelModel(RecordKey) { HotelName = "Name from mapper" }); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + new() { BsonDocumentCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.GetAsync(RecordKey); + + // Assert + Assert.NotNull(result); + Assert.Equal(RecordKey, result.HotelId); + Assert.Equal("Name from mapper", result.HotelName); + } + + public static TheoryData, string, bool> CollectionExistsData => new() + { + { ["collection-2"], "collection-2", true }, + { [], "non-existent-collection", false } + }; + + public static TheoryData, int> CreateCollectionIfNotExistsData => new() + { + { ["collection"], 0 }, + { [], 1 } + }; + + #region private + + private async Task TestUpsertWithModelAsync( + TDataModel dataModel, + string expectedPropertyName, + VectorStoreRecordDefinition? definition = null) + where TDataModel : class + { + // Arrange + var serializerRegistry = BsonSerializer.SerializerRegistry; + var documentSerializer = serializerRegistry.GetSerializer(); + var expectedDefinition = Builders.Filter.Eq(document => document["_id"], "key"); + + AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = definition != null ? 
+ new() { VectorStoreRecordDefinition = definition } : + null; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + options); + + // Act + var result = await sut.UpsertAsync(dataModel); + + // Assert + Assert.Equal("key", result); + + this._mockMongoCollection.Verify(l => l.ReplaceOneAsync( + It.Is>(definition => + definition.Render(documentSerializer, serializerRegistry) == + expectedDefinition.Render(documentSerializer, serializerRegistry)), + It.Is(document => + document["_id"] == "key" && + document.Contains(expectedPropertyName) && + document[expectedPropertyName] == "Test Name"), + It.IsAny(), + It.IsAny()), Times.Once()); + } + +#pragma warning disable CA1812 + private sealed class TestModel + { + public string? Id { get; set; } + + public string? HotelName { get; set; } + } + + private sealed class VectorStoreTestModel + { + [VectorStoreRecordKey] + public string? Id { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "hotel_name")] + public string? HotelName { get; set; } + } + + private sealed class BsonTestModel + { + [BsonId] + public string? Id { get; set; } + + [BsonElement("hotel_name")] + public string? HotelName { get; set; } + } + + private sealed class BsonVectorStoreTestModel + { + [BsonId] + [VectorStoreRecordKey] + public string? Id { get; set; } + + [BsonElement("hotel_name")] + [VectorStoreRecordData] + public string? HotelName { get; set; } + } + + private sealed class BsonVectorStoreWithNameTestModel + { + [BsonId] + [VectorStoreRecordKey] + public string? Id { get; set; } + + [BsonElement("bson_hotel_name")] + [VectorStoreRecordData(StoragePropertyName = "storage_hotel_name")] + public string? 
HotelName { get; set; } + } +#pragma warning restore CA1812 + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordMapperTests.cs new file mode 100644 index 000000000000..9546789b7947 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreRecordMapperTests.cs @@ -0,0 +1,88 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBMongoDBVectorStoreRecordMapperTests +{ + private readonly AzureCosmosDBMongoDBVectorStoreRecordMapper _sut; + + public AzureCosmosDBMongoDBVectorStoreRecordMapperTests() + { + var keyProperty = new VectorStoreRecordKeyProperty("HotelId", typeof(string)); + + var definition = new VectorStoreRecordDefinition + { + Properties = + [ + keyProperty, + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) + ] + }; + + this._sut = new(new VectorStoreRecordPropertyReader(typeof(AzureCosmosDBMongoDBHotelModel), definition, null)); + } + + [Fact] + public void MapFromDataToStorageModelReturnsValidObject() + { + // Arrange + var hotel = new AzureCosmosDBMongoDBHotelModel("key") + { + HotelName = "Test Name", + Tags = ["tag1", "tag2"], + ParkingIncluded = true, + DescriptionEmbedding = new ReadOnlyMemory([1f, 2f, 3f]) + }; + + // Act + var document = 
this._sut.MapFromDataToStorageModel(hotel); + + // Assert + Assert.NotNull(document); + + Assert.Equal("key", document["_id"]); + Assert.Equal("Test Name", document["HotelName"]); + Assert.Equal(["tag1", "tag2"], document["Tags"].AsBsonArray); + Assert.True(document["parking_is_included"].AsBoolean); + Assert.Equal([1f, 2f, 3f], document["DescriptionEmbedding"].AsBsonArray); + } + + [Fact] + public void MapFromStorageToDataModelReturnsValidObject() + { + // Arrange + var document = new BsonDocument + { + ["_id"] = "key", + ["HotelName"] = "Test Name", + ["Tags"] = BsonArray.Create(new List { "tag1", "tag2" }), + ["parking_is_included"] = BsonValue.Create(true), + ["DescriptionEmbedding"] = BsonArray.Create(new List { 1f, 2f, 3f }) + }; + + // Act + var hotel = this._sut.MapFromStorageToDataModel(document, new()); + + // Assert + Assert.NotNull(hotel); + + Assert.Equal("key", hotel.HotelId); + Assert.Equal("Test Name", hotel.HotelName); + Assert.Equal(["tag1", "tag2"], hotel.Tags); + Assert.True(hotel.ParkingIncluded); + Assert.True(new ReadOnlyMemory([1f, 2f, 3f]).Span.SequenceEqual(hotel.DescriptionEmbedding!.Value.Span)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs new file mode 100644 index 000000000000..3bc2049bf2c9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/AzureCosmosDBMongoDBVectorStoreTests.cs @@ -0,0 +1,103 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Driver; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBMongoDBVectorStoreTests +{ + private readonly Mock _mockMongoDatabase = new(); + + [Fact] + public void GetCollectionWithNotSupportedKeyThrowsException() + { + // Arrange + var sut = new AzureCosmosDBMongoDBVectorStore(this._mockMongoDatabase.Object); + + // Act & Assert + Assert.Throws(() => sut.GetCollection("collection")); + } + + [Fact] + public void GetCollectionWithFactoryReturnsCustomCollection() + { + // Arrange + var mockFactory = new Mock(); + var mockRecordCollection = new Mock>(); + + mockFactory + .Setup(l => l.CreateVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + It.IsAny())) + .Returns(mockRecordCollection.Object); + + var sut = new AzureCosmosDBMongoDBVectorStore( + this._mockMongoDatabase.Object, + new AzureCosmosDBMongoDBVectorStoreOptions { VectorStoreCollectionFactory = mockFactory.Object }); + + // Act + var collection = sut.GetCollection("collection"); + + // Assert + Assert.Same(mockRecordCollection.Object, collection); + mockFactory.Verify(l => l.CreateVectorStoreRecordCollection( + this._mockMongoDatabase.Object, + "collection", + It.IsAny()), Times.Once()); + } + + [Fact] + public void GetCollectionWithoutFactoryReturnsDefaultCollection() + { + // Arrange + var sut = new AzureCosmosDBMongoDBVectorStore(this._mockMongoDatabase.Object); + + // Act + var collection = sut.GetCollection("collection"); + + // Assert + Assert.NotNull(collection); + } + + [Fact] + public async Task ListCollectionNamesReturnsCollectionNamesAsync() + { + // Arrange + var expectedCollectionNames = new List { 
"collection-1", "collection-2", "collection-3" }; + + var mockCursor = new Mock>(); + mockCursor + .SetupSequence(l => l.MoveNextAsync(It.IsAny())) + .ReturnsAsync(true) + .ReturnsAsync(false); + + mockCursor + .Setup(l => l.Current) + .Returns(expectedCollectionNames); + + this._mockMongoDatabase + .Setup(l => l.ListCollectionNamesAsync(It.IsAny(), It.IsAny())) + .ReturnsAsync(mockCursor.Object); + + var sut = new AzureCosmosDBMongoDBVectorStore(this._mockMongoDatabase.Object); + + // Act + var actualCollectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Equal(expectedCollectionNames, actualCollectionNames); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj new file mode 100644 index 000000000000..a31e4b802b52 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBMongoDB.UnitTests/Connectors.AzureCosmosDBMongoDB.UnitTests.csproj @@ -0,0 +1,32 @@ +๏ปฟ + + + SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests + SemanticKernel.Connectors.AzureCosmosDBMongoDB.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);SKEXP0001,SKEXP0020 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task 
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..d83950adc89b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLGenericDataModelMapperTests.cs @@ -0,0 +1,366 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureCosmosDBNoSQLGenericDataModelMapperTests +{ + private static readonly JsonSerializerOptions s_jsonSerializerOptions = JsonSerializerOptions.Default; + + private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), +#if NET5_0_OR_GREATER + new VectorStoreRecordVectorProperty("HalfVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableHalfVector", typeof(ReadOnlyMemory?)), +#endif + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("ByteVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableByteVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("SByteVector", 
typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableSByteVector", typeof(ReadOnlyMemory?)), + }, + }; + + private static readonly Dictionary s_storagePropertyNames = + s_vectorStoreRecordDefinition.Properties.ToDictionary( + k => k.DataModelPropertyName, + v => v is VectorStoreRecordKeyProperty ? "id" : v.DataModelPropertyName); + +#if NET5_0_OR_GREATER + private static readonly Half[] s_halfVector = [(Half)1.0f, (Half)2.0f, (Half)3.0f]; +#endif + private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; + private static readonly byte[] s_byteVector = [1, 2, 3]; + private static readonly sbyte[] s_sbyteVector = [1, 2, 3]; + private static readonly List s_taglist = ["tag1", "tag2"]; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["TagListDataProp"] = s_taglist, + }, + Vectors = + { +#if NET5_0_OR_GREATER + ["HalfVector"] = new ReadOnlyMemory(s_halfVector), + ["NullableHalfVector"] = new ReadOnlyMemory(s_halfVector), +#endif + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["ByteVector"] = new ReadOnlyMemory(s_byteVector), + ["NullableByteVector"] = new ReadOnlyMemory(s_byteVector), + 
["SByteVector"] = new ReadOnlyMemory(s_sbyteVector), + ["NullableSByteVector"] = new ReadOnlyMemory(s_sbyteVector) + }, + }; + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", (string?)storageModel["id"]); + Assert.Equal(true, (bool?)storageModel["BoolDataProp"]); + Assert.Equal(false, (bool?)storageModel["NullableBoolDataProp"]); + Assert.Equal("string", (string?)storageModel["StringDataProp"]); + Assert.Equal(1, (int?)storageModel["IntDataProp"]); + Assert.Equal(2, (int?)storageModel["NullableIntDataProp"]); + Assert.Equal(3L, (long?)storageModel["LongDataProp"]); + Assert.Equal(4L, (long?)storageModel["NullableLongDataProp"]); + Assert.Equal(5.0f, (float?)storageModel["FloatDataProp"]); + Assert.Equal(6.0f, (float?)storageModel["NullableFloatDataProp"]); + Assert.Equal(7.0, (double?)storageModel["DoubleDataProp"]); + Assert.Equal(8.0, (double?)storageModel["NullableDoubleDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["NullableDateTimeOffsetDataProp"]); + Assert.Equal(s_taglist, storageModel["TagListDataProp"]!.AsArray().GetValues().ToArray()); +#if NET5_0_OR_GREATER + Assert.Equal(s_halfVector, storageModel["HalfVector"]!.AsArray().Select(l => (Half)(float)l!).ToArray()); + Assert.Equal(s_halfVector, storageModel["NullableHalfVector"]!.AsArray().Select(l => (Half)(float)l!).ToArray()); +#endif + Assert.Equal(s_floatVector, storageModel["FloatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_floatVector, storageModel["NullableFloatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_byteVector, storageModel["ByteVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_byteVector, storageModel["NullableByteVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_sbyteVector, 
storageModel["SByteVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_sbyteVector, storageModel["NullableSByteVector"]!.AsArray().GetValues().ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + }, + Vectors = + { + ["NullableFloatVector"] = null, + }, + }; + + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Null(storageModel["StringDataProp"]); + Assert.Null(storageModel["NullableIntDataProp"]); + Assert.Null(storageModel["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var storageModel = new JsonObject + { + ["id"] = "key", + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + 
["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["TagListDataProp"] = new JsonArray(s_taglist.Select(l => (JsonValue)l).ToArray()), +#if NET5_0_OR_GREATER + ["HalfVector"] = new JsonArray(s_halfVector.Select(l => (JsonValue)(float)l).ToArray()), + ["NullableHalfVector"] = new JsonArray(s_halfVector.Select(l => (JsonValue)(float)l).ToArray()), +#endif + ["FloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["NullableFloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["ByteVector"] = new JsonArray(s_byteVector.Select(l => (JsonValue)l).ToArray()), + ["NullableByteVector"] = new JsonArray(s_byteVector.Select(l => (JsonValue)l).ToArray()), + ["SByteVector"] = new JsonArray(s_sbyteVector.Select(l => (JsonValue)l).ToArray()), + ["NullableSByteVector"] = new JsonArray(s_sbyteVector.Select(l => (JsonValue)l).ToArray()) + }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Equal(true, dataModel.Data["BoolDataProp"]); + Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); + Assert.Equal("string", dataModel.Data["StringDataProp"]); + Assert.Equal(1, dataModel.Data["IntDataProp"]); + Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); + Assert.Equal(3L, dataModel.Data["LongDataProp"]); + Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); + Assert.Equal(5.0f, dataModel.Data["FloatDataProp"]); + Assert.Equal(6.0f, dataModel.Data["NullableFloatDataProp"]); + Assert.Equal(7.0, dataModel.Data["DoubleDataProp"]); + Assert.Equal(8.0, dataModel.Data["NullableDoubleDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2021, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); + 
Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); +#if NET5_0_OR_GREATER + Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel.Vectors["HalfVector"]!).ToArray()); + Assert.Equal(s_halfVector, ((ReadOnlyMemory)dataModel.Vectors["NullableHalfVector"]!)!.ToArray()); +#endif + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel.Vectors["ByteVector"]!).ToArray()); + Assert.Equal(s_byteVector, ((ReadOnlyMemory)dataModel.Vectors["NullableByteVector"]!)!.ToArray()); + Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel.Vectors["SByteVector"]!).ToArray()); + Assert.Equal(s_sbyteVector, ((ReadOnlyMemory)dataModel.Vectors["NullableSByteVector"]!)!.ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var storageModel = new JsonObject + { + ["id"] = "key", + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableFloatVector"] = null + }; + + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.Null(dataModel.Data["StringDataProp"]); + Assert.Null(dataModel.Data["NullableIntDataProp"]); + 
Assert.Null(dataModel.Vectors["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelThrowsForMissingKey() + { + // Arrange + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var storageModel = new JsonObject(); + + // Act & Assert + var exception = Assert.Throws( + () => sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true })); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key"); + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", (string?)storageModel["id"]); + Assert.False(storageModel.ContainsKey("StringDataProp")); + Assert.False(storageModel.ContainsKey("FloatVector")); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var storageModel = new JsonObject + { + ["id"] = "key" + }; + + var sut = new AzureCosmosDBNoSQLGenericDataModelMapper( + s_vectorStoreRecordDefinition.Properties, + 
s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal("key", dataModel.Key); + Assert.False(dataModel.Data.ContainsKey("StringDataProp")); + Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs new file mode 100644 index 000000000000..951eca4bb016 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLHotel.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Data; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +public class AzureCosmosDBNoSQLHotel(string hotelId) +{ + /// The key of the record. + [VectorStoreRecordKey] + public string HotelId { get; init; } = hotelId; + + /// A string metadata field. + [VectorStoreRecordData] + public string? HotelName { get; set; } + + /// An int metadata field. + [VectorStoreRecordData] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. + [VectorStoreRecordData(StoragePropertyName = "parking_is_included")] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. + [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData] + public string? Description { get; set; } + + /// A vector field. 
+ [JsonPropertyName("description_embedding")] + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.Flat, DistanceFunction: DistanceFunction.CosineSimilarity)] + public ReadOnlyMemory? DescriptionEmbedding { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..e348fe136d58 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLKernelBuilderExtensionsTests.cs @@ -0,0 +1,54 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Reflection; +using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureCosmosDBNoSQLKernelBuilderExtensionsTests +{ + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(Mock.Of()); + + // Act + this._kernelBuilder.AddAzureCosmosDBNoSQLVectorStore(); + + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } + + [Fact] + public void AddVectorStoreWithConnectionStringRegistersClass() + { + // Act + this._kernelBuilder.AddAzureCosmosDBNoSQLVectorStore("AccountEndpoint=https://test.documents.azure.com:443/;AccountKey=mock;", "mydb"); + + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + var database = (Database)vectorStore.GetType().GetField("_database", BindingFlags.NonPublic | BindingFlags.Instance)!.GetValue(vectorStore)!; + Assert.Equal(HttpHeaderConstant.Values.UserAgent, database.Client.ClientOptions.ApplicationName); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..fd2f822bd934 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLServiceCollectionExtensionsTests.cs @@ -0,0 +1,54 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Reflection; +using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBNoSQLServiceCollectionExtensionsTests +{ + private readonly IServiceCollection _serviceCollection = new ServiceCollection(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._serviceCollection.AddSingleton(Mock.Of()); + + // Act + this._serviceCollection.AddAzureCosmosDBNoSQLVectorStore(); + + var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } + + [Fact] + public void AddVectorStoreWithConnectionStringRegistersClass() + { + // Act + this._serviceCollection.AddAzureCosmosDBNoSQLVectorStore("AccountEndpoint=https://test.documents.azure.com:443/;AccountKey=mock;", "mydb"); + + var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + var database = (Database)vectorStore.GetType().GetField("_database", BindingFlags.NonPublic | BindingFlags.Instance)!.GetValue(vectorStore)!; + Assert.Equal(HttpHeaderConstant.Values.UserAgent, database.Client.ClientOptions.ApplicationName); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..52333e03b969 --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -0,0 +1,647 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; +using DistanceFunction = Microsoft.SemanticKernel.Data.DistanceFunction; +using IndexKind = Microsoft.SemanticKernel.Data.IndexKind; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests +{ + private readonly Mock _mockDatabase = new(); + private readonly Mock _mockContainer = new(); + + public AzureCosmosDBNoSQLVectorStoreRecordCollectionTests() + { + this._mockDatabase + .Setup(l => l.GetContainer(It.IsAny())) + .Returns(this._mockContainer.Object); + } + + [Fact] + public void ConstructorForModelWithoutKeyThrowsException() + { + // Act & Assert + var exception = Assert.Throws(() => new AzureCosmosDBNoSQLVectorStoreRecordCollection(this._mockDatabase.Object, "collection")); + Assert.Contains("No key property found", exception.Message); + } + + [Fact] + public void ConstructorWithDeclarativeModelInitializesCollection() + { + // Act & Assert + var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + Assert.NotNull(collection); + } + + [Fact] + public void ConstructorWithImperativeModelInitializesCollection() + { + // Arrange + var definition = new VectorStoreRecordDefinition + { + Properties = [new VectorStoreRecordKeyProperty("Id", typeof(string))] + }; + + // Act + var collection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection", + 
new() { VectorStoreRecordDefinition = definition }); + + // Assert + Assert.NotNull(collection); + } + + [Theory] + [MemberData(nameof(CollectionExistsData))] + public async Task CollectionExistsReturnsValidResultAsync(List collections, string collectionName, bool expectedResult) + { + // Arrange + var mockFeedResponse = new Mock>(); + mockFeedResponse + .Setup(l => l.Resource) + .Returns(collections); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockDatabase + .Setup(l => l.GetContainerQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + collectionName); + + // Act + var actualResult = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(expectedResult, actualResult); + } + + [Theory] + [InlineData(IndexingMode.Consistent)] + [InlineData(IndexingMode.Lazy)] + [InlineData(IndexingMode.None)] + public async Task CreateCollectionUsesValidContainerPropertiesAsync(IndexingMode indexingMode) + { + // Arrange + const string CollectionName = "collection"; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + CollectionName, + new() { IndexingMode = indexingMode, Automatic = indexingMode != IndexingMode.None }); + + var expectedVectorEmbeddingPolicy = new VectorEmbeddingPolicy( + [ + new Embedding + { + DataType = VectorDataType.Float16, + Dimensions = 1, + DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction.Cosine, + Path = "/DescriptionEmbedding1" + }, + new Embedding + { + DataType = VectorDataType.Float32, + Dimensions = 2, + DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction.Cosine, + Path = "/DescriptionEmbedding2" + }, + new Embedding + { + DataType = 
VectorDataType.Uint8, + Dimensions = 3, + DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction.DotProduct, + Path = "/DescriptionEmbedding3" + }, + new Embedding + { + DataType = VectorDataType.Int8, + Dimensions = 4, + DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction.Euclidean, + Path = "/DescriptionEmbedding4" + }, + ]); + + var expectedIndexingPolicy = new IndexingPolicy + { + VectorIndexes = + [ + new VectorIndexPath { Type = VectorIndexType.Flat, Path = "/DescriptionEmbedding1" }, + new VectorIndexPath { Type = VectorIndexType.Flat, Path = "/DescriptionEmbedding2" }, + new VectorIndexPath { Type = VectorIndexType.QuantizedFlat, Path = "/DescriptionEmbedding3" }, + new VectorIndexPath { Type = VectorIndexType.DiskANN, Path = "/DescriptionEmbedding4" }, + ], + IndexingMode = indexingMode, + Automatic = indexingMode != IndexingMode.None + }; + + if (indexingMode != IndexingMode.None) + { + expectedIndexingPolicy.IncludedPaths.Add(new IncludedPath { Path = "/IndexableData1/?" }); + expectedIndexingPolicy.IncludedPaths.Add(new IncludedPath { Path = "/IndexableData2/?" 
}); + expectedIndexingPolicy.IncludedPaths.Add(new IncludedPath { Path = "/" }); + + expectedIndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/DescriptionEmbedding1/*" }); + expectedIndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/DescriptionEmbedding2/*" }); + expectedIndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/DescriptionEmbedding3/*" }); + expectedIndexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = "/DescriptionEmbedding4/*" }); + } + + var expectedContainerProperties = new ContainerProperties(CollectionName, "/id") + { + VectorEmbeddingPolicy = expectedVectorEmbeddingPolicy, + IndexingPolicy = expectedIndexingPolicy + }; + + // Act + await sut.CreateCollectionAsync(); + + // Assert + this._mockDatabase.Verify(l => l.CreateContainerAsync( + It.Is(properties => this.VerifyContainerProperties(expectedContainerProperties, properties)), + It.IsAny(), + It.IsAny(), + It.IsAny()), + Times.Once()); + } + + [Theory] + [MemberData(nameof(CreateCollectionIfNotExistsData))] + public async Task CreateCollectionIfNotExistsInvokesValidMethodsAsync(List collections, int actualCollectionCreations) + { + // Arrange + const string CollectionName = "collection"; + + var mockFeedResponse = new Mock>(); + mockFeedResponse + .Setup(l => l.Resource) + .Returns(collections); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockDatabase + .Setup(l => l.GetContainerQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + CollectionName); + + // Act + await sut.CreateCollectionIfNotExistsAsync(); + + // Assert + this._mockDatabase.Verify(l => l.CreateContainerAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + 
It.IsAny()), + Times.Exactly(actualCollectionCreations)); + } + + [Theory] + [InlineData("recordKey", false)] + [InlineData("partitionKey", true)] + public async Task DeleteInvokesValidMethodsAsync( + string expectedPartitionKey, + bool useCompositeKeyCollection) + { + // Arrange + const string RecordKey = "recordKey"; + const string PartitionKey = "partitionKey"; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + if (useCompositeKeyCollection) + { + await ((IVectorStoreRecordCollection)sut).DeleteAsync( + new AzureCosmosDBNoSQLCompositeKey(RecordKey, PartitionKey)); + } + else + { + await ((IVectorStoreRecordCollection)sut).DeleteAsync( + RecordKey); + } + + // Assert + this._mockContainer.Verify(l => l.DeleteItemAsync( + RecordKey, + new PartitionKey(expectedPartitionKey), + It.IsAny(), + It.IsAny()), + Times.Once()); + } + + [Fact] + public async Task DeleteBatchInvokesValidMethodsAsync() + { + // Arrange + List recordKeys = ["key1", "key2"]; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + await sut.DeleteBatchAsync(recordKeys); + + // Assert + foreach (var key in recordKeys) + { + this._mockContainer.Verify(l => l.DeleteItemAsync( + key, + new PartitionKey(key), + It.IsAny(), + It.IsAny()), + Times.Once()); + } + } + + [Fact] + public async Task DeleteCollectionInvokesValidMethodsAsync() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + this._mockContainer.Verify(l => l.DeleteContainerAsync( + It.IsAny(), + It.IsAny()), + Times.Once()); + } + + [Fact] + public async Task GetReturnsValidRecordAsync() + { + // Arrange + const string RecordKey = "key"; + + var jsonObject = new JsonObject { ["id"] = RecordKey, ["HotelName"] = "Test Name" }; + + var mockFeedResponse = new Mock>(); + 
mockFeedResponse + .Setup(l => l.Resource) + .Returns([jsonObject]); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockContainer + .Setup(l => l.GetItemQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + var result = await sut.GetAsync(RecordKey); + + // Assert + Assert.NotNull(result); + Assert.Equal(RecordKey, result.HotelId); + Assert.Equal("Test Name", result.HotelName); + } + + [Fact] + public async Task GetBatchReturnsValidRecordAsync() + { + // Arrange + var jsonObject1 = new JsonObject { ["id"] = "key1", ["HotelName"] = "Test Name 1" }; + var jsonObject2 = new JsonObject { ["id"] = "key2", ["HotelName"] = "Test Name 2" }; + var jsonObject3 = new JsonObject { ["id"] = "key3", ["HotelName"] = "Test Name 3" }; + + var mockFeedResponse = new Mock>(); + mockFeedResponse + .Setup(l => l.Resource) + .Returns([jsonObject1, jsonObject2, jsonObject3]); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockContainer + .Setup(l => l.GetItemQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + var results = await sut.GetBatchAsync(["key1", "key2", "key3"]).ToListAsync(); + + // Assert + Assert.NotNull(results[0]); + Assert.Equal("key1", results[0].HotelId); + Assert.Equal("Test Name 1", results[0].HotelName); + + Assert.NotNull(results[1]); + 
Assert.Equal("key2", results[1].HotelId); + Assert.Equal("Test Name 2", results[1].HotelName); + + Assert.NotNull(results[2]); + Assert.Equal("key3", results[2].HotelId); + Assert.Equal("Test Name 3", results[2].HotelName); + } + + [Fact] + public async Task UpsertReturnsRecordKeyAsync() + { + // Arrange + var hotel = new AzureCosmosDBNoSQLHotel("key") { HotelName = "Test Name" }; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal("key", result); + + this._mockContainer.Verify(l => l.UpsertItemAsync( + It.Is(node => + node["id"]!.ToString() == "key" && + node["HotelName"]!.ToString() == "Test Name"), + new PartitionKey("key"), + It.IsAny(), + It.IsAny()), + Times.Once()); + } + + [Fact] + public async Task UpsertBatchReturnsRecordKeysAsync() + { + // Arrange + var hotel1 = new AzureCosmosDBNoSQLHotel("key1") { HotelName = "Test Name 1" }; + var hotel2 = new AzureCosmosDBNoSQLHotel("key2") { HotelName = "Test Name 2" }; + var hotel3 = new AzureCosmosDBNoSQLHotel("key3") { HotelName = "Test Name 3" }; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection"); + + // Act + var results = await sut.UpsertBatchAsync([hotel1, hotel2, hotel3]).ToListAsync(); + + // Assert + Assert.NotNull(results); + Assert.Equal(3, results.Count); + + Assert.Equal("key1", results[0]); + Assert.Equal("key2", results[1]); + Assert.Equal("key3", results[2]); + } + + [Fact] + public async Task UpsertWithCustomMapperWorksCorrectlyAsync() + { + // Arrange + var hotel = new AzureCosmosDBNoSQLHotel("key") { HotelName = "Test Name" }; + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) + .Returns(new JsonObject { ["id"] = "key", ["my_name"] = "Test Name" }); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + 
"collection", + new() { JsonObjectCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal("key", result); + + this._mockContainer.Verify(l => l.UpsertItemAsync( + It.Is(node => + node["id"]!.ToString() == "key" && + node["my_name"]!.ToString() == "Test Name"), + new PartitionKey("key"), + It.IsAny(), + It.IsAny()), + Times.Once()); + } + + [Fact] + public async Task GetWithCustomMapperWorksCorrectlyAsync() + { + // Arrange + const string RecordKey = "key"; + + var jsonObject = new JsonObject { ["id"] = RecordKey, ["HotelName"] = "Test Name" }; + + var mockFeedResponse = new Mock>(); + mockFeedResponse + .Setup(l => l.Resource) + .Returns([jsonObject]); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockContainer + .Setup(l => l.GetItemQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) + .Returns(new AzureCosmosDBNoSQLHotel(RecordKey) { HotelName = "Name from mapper" }); + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection", + new() { JsonObjectCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.GetAsync(RecordKey); + + // Assert + Assert.NotNull(result); + Assert.Equal(RecordKey, result.HotelId); + Assert.Equal("Name from mapper", result.HotelName); + } + + public static TheoryData, string, bool> CollectionExistsData => new() + { + { ["collection-2"], "collection-2", true }, + { [], "non-existent-collection", false } + }; + + public static TheoryData, int> CreateCollectionIfNotExistsData => new() + { + { ["collection"], 0 }, + { [], 1 } + }; + + #region + + private 
bool VerifyContainerProperties(ContainerProperties expected, ContainerProperties actual) + { + Assert.Equal(expected.Id, actual.Id); + Assert.Equal(expected.PartitionKeyPath, actual.PartitionKeyPath); + Assert.Equal(expected.IndexingPolicy.IndexingMode, actual.IndexingPolicy.IndexingMode); + Assert.Equal(expected.IndexingPolicy.Automatic, actual.IndexingPolicy.Automatic); + + for (var i = 0; i < expected.VectorEmbeddingPolicy.Embeddings.Count; i++) + { + var expectedEmbedding = expected.VectorEmbeddingPolicy.Embeddings[i]; + var actualEmbedding = actual.VectorEmbeddingPolicy.Embeddings[i]; + + Assert.Equal(expectedEmbedding.DataType, actualEmbedding.DataType); + Assert.Equal(expectedEmbedding.Dimensions, actualEmbedding.Dimensions); + Assert.Equal(expectedEmbedding.DistanceFunction, actualEmbedding.DistanceFunction); + Assert.Equal(expectedEmbedding.Path, actualEmbedding.Path); + } + + for (var i = 0; i < expected.IndexingPolicy.VectorIndexes.Count; i++) + { + var expectedIndexPath = expected.IndexingPolicy.VectorIndexes[i]; + var actualIndexPath = actual.IndexingPolicy.VectorIndexes[i]; + + Assert.Equal(expectedIndexPath.Type, actualIndexPath.Type); + Assert.Equal(expectedIndexPath.Path, actualIndexPath.Path); + } + + for (var i = 0; i < expected.IndexingPolicy.IncludedPaths.Count; i++) + { + var expectedIncludedPath = expected.IndexingPolicy.IncludedPaths[i].Path; + var actualIncludedPath = actual.IndexingPolicy.IncludedPaths[i].Path; + + Assert.Equal(expectedIncludedPath, actualIncludedPath); + } + + for (var i = 0; i < expected.IndexingPolicy.ExcludedPaths.Count; i++) + { + var expectedExcludedPath = expected.IndexingPolicy.ExcludedPaths[i].Path; + var actualExcludedPath = actual.IndexingPolicy.ExcludedPaths[i].Path; + + Assert.Equal(expectedExcludedPath, actualExcludedPath); + } + + return true; + } + +#pragma warning disable CA1812 + private sealed class TestModel + { + public string? Id { get; set; } + + public string? 
HotelName { get; set; } + } + + private sealed class TestIndexingModel + { + [VectorStoreRecordKey] + public string? Id { get; set; } + + [VectorStoreRecordVector(Dimensions: 1, IndexKind: IndexKind.Flat, DistanceFunction: DistanceFunction.CosineSimilarity)] + public ReadOnlyMemory? DescriptionEmbedding1 { get; set; } + + [VectorStoreRecordVector(Dimensions: 2, IndexKind: IndexKind.Flat, DistanceFunction: DistanceFunction.CosineSimilarity)] + public ReadOnlyMemory? DescriptionEmbedding2 { get; set; } + + [VectorStoreRecordVector(Dimensions: 3, IndexKind: IndexKind.QuantizedFlat, DistanceFunction: DistanceFunction.DotProductSimilarity)] + public ReadOnlyMemory? DescriptionEmbedding3 { get; set; } + + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.DiskAnn, DistanceFunction: DistanceFunction.EuclideanDistance)] + public ReadOnlyMemory? DescriptionEmbedding4 { get; set; } + + [VectorStoreRecordData(IsFilterable = true)] + public string? IndexableData1 { get; set; } + + [VectorStoreRecordData(IsFullTextSearchable = true)] + public string? IndexableData2 { get; set; } + + [VectorStoreRecordData] + public string? NonIndexableData1 { get; set; } + } +#pragma warning restore CA1812 + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs new file mode 100644 index 000000000000..9c2b7de29b41 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreRecordMapperTests.cs @@ -0,0 +1,79 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class AzureCosmosDBNoSQLVectorStoreRecordMapperTests +{ + private readonly AzureCosmosDBNoSQLVectorStoreRecordMapper _sut; + + public AzureCosmosDBNoSQLVectorStoreRecordMapperTests() + { + var storagePropertyNames = new Dictionary + { + ["HotelId"] = "HotelId", + ["HotelName"] = "HotelName", + ["Tags"] = "Tags", + ["DescriptionEmbedding"] = "description_embedding", + }; + + this._sut = new("HotelId", storagePropertyNames, JsonSerializerOptions.Default); + } + + [Fact] + public void MapFromDataToStorageModelReturnsValidObject() + { + // Arrange + var hotel = new AzureCosmosDBNoSQLHotel("key") + { + HotelName = "Test Name", + Tags = ["tag1", "tag2"], + DescriptionEmbedding = new ReadOnlyMemory([1f, 2f, 3f]) + }; + + // Act + var document = this._sut.MapFromDataToStorageModel(hotel); + + // Assert + Assert.NotNull(document); + + Assert.Equal("key", document["id"]!.GetValue()); + Assert.Equal("Test Name", document["HotelName"]!.GetValue()); + Assert.Equal(["tag1", "tag2"], document["Tags"]!.AsArray().Select(l => l!.GetValue())); + Assert.Equal([1f, 2f, 3f], document["description_embedding"]!.AsArray().Select(l => l!.GetValue())); + } + + [Fact] + public void MapFromStorageToDataModelReturnsValidObject() + { + // Arrange + var document = new JsonObject + { + ["id"] = "key", + ["HotelName"] = "Test Name", + ["Tags"] = new JsonArray(new List { "tag1", "tag2" }.Select(l => JsonValue.Create(l)).ToArray()), + ["description_embedding"] = new JsonArray(new List { 1f, 2f, 3f }.Select(l => JsonValue.Create(l)).ToArray()), + }; + + // Act + var hotel = this._sut.MapFromStorageToDataModel(document, new()); + + // Assert + Assert.NotNull(hotel); + + 
Assert.Equal("key", hotel.HotelId); + Assert.Equal("Test Name", hotel.HotelName); + Assert.Equal(["tag1", "tag2"], hotel.Tags); + Assert.True(new ReadOnlyMemory([1f, 2f, 3f]).Span.SequenceEqual(hotel.DescriptionEmbedding!.Value.Span)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs new file mode 100644 index 000000000000..79a74d541a86 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/AzureCosmosDBNoSQLVectorStoreTests.cs @@ -0,0 +1,126 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class AzureCosmosDBNoSQLVectorStoreTests +{ + private readonly Mock _mockDatabase = new(); + + [Fact] + public void GetCollectionWithNotSupportedKeyThrowsException() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStore(this._mockDatabase.Object); + + // Act & Assert + Assert.Throws(() => sut.GetCollection("collection")); + } + + [Fact] + public void GetCollectionWithSupportedKeyReturnsCollection() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStore(this._mockDatabase.Object); + + // Act + var collectionWithStringKey = sut.GetCollection("collection1"); + var collectionWithCompositeKey = sut.GetCollection("collection1"); + + // Assert + Assert.NotNull(collectionWithStringKey); + Assert.NotNull(collectionWithCompositeKey); + } + + [Fact] + public void GetCollectionWithFactoryReturnsCustomCollection() + { + // Arrange + var mockFactory = new Mock(); + var mockRecordCollection = new Mock>(); + + mockFactory + .Setup(l => l.CreateVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection", + It.IsAny())) + .Returns(mockRecordCollection.Object); + + var sut = new AzureCosmosDBNoSQLVectorStore( + this._mockDatabase.Object, + new AzureCosmosDBNoSQLVectorStoreOptions { VectorStoreCollectionFactory = mockFactory.Object }); + + // Act + var collection = sut.GetCollection("collection"); + + // Assert + Assert.Same(mockRecordCollection.Object, collection); + mockFactory.Verify(l => l.CreateVectorStoreRecordCollection( + this._mockDatabase.Object, + "collection", + It.IsAny()), Times.Once()); + } + + [Fact] + public void GetCollectionWithoutFactoryReturnsDefaultCollection() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStore(this._mockDatabase.Object); + + // Act + var collection = sut.GetCollection("collection"); + + // Assert + Assert.NotNull(collection); + } + + [Fact] + public async Task ListCollectionNamesReturnsCollectionNamesAsync() + { + // Arrange + var expectedCollectionNames = new List { "collection-1", 
"collection-2", "collection-3" }; + + var mockFeedResponse = new Mock>(); + mockFeedResponse + .Setup(l => l.Resource) + .Returns(expectedCollectionNames); + + var mockFeedIterator = new Mock>(); + mockFeedIterator + .SetupSequence(l => l.HasMoreResults) + .Returns(true) + .Returns(false); + + mockFeedIterator + .Setup(l => l.ReadNextAsync(It.IsAny())) + .ReturnsAsync(mockFeedResponse.Object); + + this._mockDatabase + .Setup(l => l.GetContainerQueryIterator( + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(mockFeedIterator.Object); + + var sut = new AzureCosmosDBNoSQLVectorStore(this._mockDatabase.Object); + + // Act + var actualCollectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Equal(expectedCollectionNames, actualCollectionNames); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj new file mode 100644 index 000000000000..ff8643740f11 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureCosmosDBNoSQL.UnitTests/Connectors.AzureCosmosDBNoSQL.UnitTests.csproj @@ -0,0 +1,32 @@ + + + + SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests + SemanticKernel.Connectors.AzureCosmosDBNoSQL.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);SKEXP0001,SKEXP0020 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] 
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs new file mode 100644 index 000000000000..31a7654fcfc6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs @@ -0,0 +1,30 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.IO; +using System.Net.Http; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests; + +/// +/// Helper for AzureOpenAI test purposes. +/// +internal static class AzureOpenAITestHelper +{ + /// + /// Reads test response from file for mocking purposes. + /// + /// Name of the file with test response. + internal static string GetTestResponse(string fileName) + { + return File.ReadAllText($"./TestData/{fileName}"); + } + + /// + /// Reads test response from file and create . + /// + /// Name of the file with test response. 
+ internal static StreamContent GetTestResponseAsStream(string fileName) + { + return new StreamContent(File.OpenRead($"./TestData/{fileName}")); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj new file mode 100644 index 000000000000..a0a695a6719c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj @@ -0,0 +1,47 @@ +๏ปฟ + + + + SemanticKernel.Connectors.AzureOpenAI.UnitTests + $(AssemblyName) + net8.0 + true + enable + false + $(NoWarn);SKEXP0001;SKEXP0010;CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,IDE1006 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..90d124ca15c5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs @@ -0,0 +1,190 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions; + +/// +/// Unit tests for the kernel builder extensions in the class. +/// +public sealed class AzureOpenAIKernelBuilderExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"), + _ => builder + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureOpenAIChatCompletionService); 
+ + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is AzureOpenAIChatCompletionService); + } + + #endregion + + #region Text embeddings + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + #endregion + + #region Text to audio + + [Fact] + public void KernelBuilderAddAzureOpenAITextToAudioAddsValidService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key") + .Build() + .GetRequiredService(); + + // Assert + Assert.IsType(service); + } + + #endregion + + #region Text to image + + [Theory] + [InlineData(InitializationType.ApiKey)] + 
[InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderExtensionsAddAzureOpenAITextToImageService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAITextToImage("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextToImage("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAITextToImageService); + } + + #endregion + + #region Audio to text + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void KernelBuilderAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://endpoint"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => 
builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.AddAzureOpenAIAudioToText("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIAudioToText("deployment-name"), + _ => builder + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.IsType(service); + } + + #endregion + + public enum InitializationType + { + ApiKey, + TokenCredential, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..5e3120c0aa8e --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs @@ -0,0 +1,190 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions; + +/// +/// Unit tests for the service collection extensions in the class. 
+/// +public sealed class AzureOpenAIServiceCollectionExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"), + _ => builder.Services + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is AzureOpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is AzureOpenAIChatCompletionService); + } + + #endregion + + #region Text embeddings + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) + { + // Arrange + var credentials = 
DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.NotNull(service); + Assert.True(service is AzureOpenAITextEmbeddingGenerationService); + } + + #endregion + + #region Text to audio + + [Fact] + public void ServiceCollectionAddAzureOpenAITextToAudioAddsValidService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.IsType(service); + } + + #endregion + + #region Text to image + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionExtensionsAddAzureOpenAITextToImageService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("https://localhost"), new ApiKeyCredential("key")); + var builder = 
Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAITextToImage("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextToImage("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAITextToImageService); + } + + #endregion + + #region Audio to text + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.TokenCredential)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ServiceCollectionAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) + { + // Arrange + var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); + var client = new AzureOpenAIClient(new Uri("http://endpoint"), new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), + InitializationType.TokenCredential => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), + InitializationType.ClientInline => builder.Services.AddAzureOpenAIAudioToText("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIAudioToText("deployment-name"), + _ => builder.Services + }; + + // Assert + var service = 
builder.Build().GetRequiredService(); + + Assert.True(service is AzureOpenAIAudioToTextService); + } + + #endregion + + public enum InitializationType + { + ApiKey, + TokenCredential, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs similarity index 55% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs index 6100c434c878..4254bdbe4516 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs @@ -1,17 +1,20 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.ClientModel; using System.Net.Http; +using System.Text; using System.Threading.Tasks; using Azure.AI.OpenAI; using Azure.Core; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; using Moq; -using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; /// /// Unit tests for class. @@ -36,12 +39,12 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act var service = includeLoggerFactory ? 
- new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id"); + new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); } [Theory] @@ -56,8 +59,8 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto new AzureOpenAIAudioToTextService("deployment", "https://endpoint", credentials, "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); } [Theory] @@ -66,14 +69,26 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act - var client = new OpenAIClient("key"); + var client = new AzureOpenAIClient(new Uri("http://host"), new ApiKeyCredential("key")); var service = includeLoggerFactory ? 
new AzureOpenAIAudioToTextService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : new AzureOpenAIAudioToTextService("deployment", client, "model-id"); // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]); + } + + [Fact] + public void ItThrowsIfDeploymentNameIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new AzureOpenAIAudioToTextService(" ", "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService(" ", azureOpenAIClient: new(new Uri("http://host"), new ApiKeyCredential("apikey")))); + Assert.Throws(() => new AzureOpenAIAudioToTextService("", "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService("", azureOpenAIClient: new(new Uri("http://host"), new ApiKeyCredential("apikey")))); + Assert.Throws(() => new AzureOpenAIAudioToTextService(null!, "http://host", "apikey")); + Assert.Throws(() => new AzureOpenAIAudioToTextService(null!, azureOpenAIClient: new(new Uri("http://host"), new ApiKeyCredential("apikey")))); } [Theory] @@ -81,7 +96,7 @@ public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAudioToTextExecutionSettings? 
settings, Type expectedExceptionType) { // Arrange - var service = new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent("Test audio-to-text response") @@ -95,6 +110,34 @@ public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAu Assert.IsType(expectedExceptionType, exception); } + [Theory] + [InlineData("verbose_json")] + [InlineData("json")] + [InlineData("vtt")] + [InlineData("srt")] + public async Task ItRespectResultFormatExecutionSettingAsync(string format) + { + // Arrange + var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent("Test audio-to-text response") + }; + + // Act + var settings = new OpenAIAudioToTextExecutionSettings("file.mp3") { ResponseFormat = format }; + var result = await service.GetTextContentsAsync(new AudioContent(new BinaryData("data"), mimeType: null), settings); + + // Assert + Assert.NotNull(this._messageHandlerStub.RequestContent); + Assert.NotNull(result); + + var multiPartData = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + var multiPartBreak = multiPartData.Substring(0, multiPartData.IndexOf("\r\n", StringComparison.OrdinalIgnoreCase)); + + Assert.Contains($"{format}\r\n{multiPartBreak}", multiPartData); + } + [Fact] public async Task GetTextContentByDefaultWorksCorrectlyAsync() { diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs similarity index 50% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs index 22be8458c2cc..c1e3fe96526f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.ClientModel; using System.Collections.Generic; using System.IO; using System.Linq; @@ -10,16 +11,20 @@ using System.Text.Json; using System.Threading.Tasks; using Azure.AI.OpenAI; +using Azure.AI.OpenAI.Chat; using Azure.Core; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; -using Xunit; +using OpenAI.Chat; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; +using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; /// /// Unit tests for @@ -80,7 +85,7 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act - var client = new OpenAIClient("key"); + var client = new AzureOpenAIClient(new Uri("http://host"), new ApiKeyCredential("apikey")); var service = includeLoggerFactory ? 
new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : new AzureOpenAIChatCompletionService("deployment", client, "model-id"); @@ -95,10 +100,12 @@ public async Task GetTextContentsWorksCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); // Act var result = await service.GetTextContentsAsync("Prompt"); @@ -107,43 +114,12 @@ public async Task GetTextContentsWorksCorrectlyAsync() Assert.True(result.Count > 0); Assert.Equal("Test chat response", result[0].Text); - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + var usage = result[0].Metadata?["Usage"] as ChatTokenUsage; Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}") - }); - - // Act & Assert - var exception 
= await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([])); - - Assert.Equal("Chat completions not found", exception.Message); - } - - [Theory] - [InlineData(0)] - [InlineData(129)] - public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) - { - // Arrange - var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetChatMessageContentsAsync([], settings)); - - Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); + Assert.Equal(55, usage.InputTokenCount); + Assert.Equal(100, usage.OutputTokenCount); + Assert.Equal(155, usage.TotalTokenCount); } [Fact] @@ -151,30 +127,26 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() + var settings = new AzureOpenAIPromptExecutionSettings() { MaxTokens = 123, Temperature = 0.6, TopP = 0.5, FrequencyPenalty = 1.6, PresencePenalty = 1.2, - ResultsPerPrompt = 5, Seed = 567, TokenSelectionBiases = new Dictionary { { 2, 3 } }, StopSequences = ["stop_sequence"], Logprobs = true, TopLogprobs = 5, - AzureChatExtensionsOptions = new AzureChatExtensionsOptions +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ AzureChatDataSource = new AzureSearchChatDataSource() { - Extensions = - { - new AzureSearchChatExtensionConfiguration - { - SearchEndpoint = new Uri("http://test-search-endpoint"), - IndexName = "test-index-name" - } - } + Endpoint = new Uri("http://test-search-endpoint"), + IndexName = "test-index-name", + Authentication = DataSourceAuthentication.FromApiKey("api-key"), } +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. }; var chatHistory = new ChatHistory(); @@ -183,10 +155,11 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() chatHistory.AddSystemMessage("System Message"); chatHistory.AddAssistantMessage("Assistant Message"); - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); // Act var result = await service.GetChatMessageContentsAsync(chatHistory, settings); @@ -227,7 +200,6 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync() Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); - Assert.Equal(5, content.GetProperty("n").GetInt32()); Assert.Equal(567, content.GetProperty("seed").GetInt32()); Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); @@ -249,18 +221,19 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje { // 
Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings + var settings = new AzureOpenAIPromptExecutionSettings { ResponseFormat = responseFormat }; - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); // Act - var result = await service.GetChatMessageContentsAsync([], settings); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings); // Assert var requestContent = this._messageHandlerStub.RequestContents[0]; @@ -279,28 +252,29 @@ public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior beh // Arrange var kernel = Kernel.CreateBuilder().Build(); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = behavior }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = behavior }; - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + 
var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.True(result.Count > 0); Assert.Equal("Test chat response", result[0].Content); - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; + var usage = result[0].Metadata?["Usage"] as ChatTokenUsage; Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); + Assert.Equal(55, usage.InputTokenCount); + Assert.Equal(100, usage.OutputTokenCount); + Assert.Equal(155, usage.TotalTokenCount); - Assert.Equal("stop", result[0].Metadata?["FinishReason"]); + Assert.Equal("Stop", result[0].Metadata?["FinishReason"]); } [Fact] @@ -325,15 +299,15 @@ public async Task GetChatMessageContentsWithFunctionCallAsync() kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.True(result.Count > 0); @@ -361,22 +335,29 @@ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttempt kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var responses = new List(); - for (var i = 0; i < ModelResponsesCount; i++) + try { - responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }); - } + for (var i = 0; i < ModelResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }); + } - this._messageHandlerStub.ResponsesToReturn = responses; + this._messageHandlerStub.ResponsesToReturn = responses; - // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + // Act + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); - // Assert - Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + // Assert + Assert.Equal(DefaultMaximumAutoInvokeAttempts, 
functionCallCount); + } + finally + { + responses.ForEach(r => r.Dispose()); + } } [Fact] @@ -398,15 +379,15 @@ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync() kernel.Plugins.Add(plugin); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; // Act - var result = await service.GetChatMessageContentsAsync([], settings, kernel); + var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel); // Assert Assert.Equal(1, functionCallCount); @@ -434,12 +415,13 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new 
MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(stream) - }); + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); // Act & Assert var enumerator = service.GetStreamingTextContentsAsync("Prompt").GetAsyncEnumerator(); @@ -448,21 +430,82 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Text); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } [Fact] - public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + public async Task GetStreamingChatContentsWithAsynchronousFilterWorksCorrectlyAsync() { // Arrange var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_async_filter_response.txt"))); this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(stream) }); + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync("Prompt").GetAsyncEnumerator(); + +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. 
Suppress this diagnostic to proceed. + await enumerator.MoveNextAsync(); + var message = enumerator.Current; + + Assert.IsType(message.InnerContent); + var update = (StreamingChatCompletionUpdate)message.InnerContent; + var promptResults = update.GetRequestContentFilterResult(); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Violence.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.SelfHarm.Severity); + Assert.False(promptResults.Jailbreak.Detected); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + + var filterResults = update.GetResponseContentFilterResult(); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.SelfHarm.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Violence.Severity); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + filterResults = update.GetResponseContentFilterResult(); + Assert.False(filterResults.ProtectedMaterialCode.Detected); + Assert.False(filterResults.ProtectedMaterialText.Detected); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); + // Act & Assert var enumerator = service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator(); @@ -470,7 +513,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() Assert.Equal("Test chat streaming response", enumerator.Current.Content); await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); } [Fact] @@ -495,10 +538,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_multiple_function_calls_test_response.txt") }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -507,10 +550,101 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() await enumerator.MoveNextAsync(); Assert.Equal("Test chat streaming response", enumerator.Current.Content); - Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + // Keep looping until the end of stream + while (await enumerator.MoveNextAsync()) + { + } + + Assert.Equal(2, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallAsyncFilterAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function1 = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var function2 = KernelFunctionFactory.CreateFromMethod((string argument) => + { + functionCallCount++; + throw new ArgumentException("Some exception"); + }, "FunctionWithException"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); + + var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); + var settings = new 
AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_multiple_function_calls_test_async_filter_response.txt") }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }; + + this._messageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator(); + await enumerator.MoveNextAsync(); + var message = enumerator.Current; + + Assert.IsType(message.InnerContent); + var update = (StreamingChatCompletionUpdate)message.InnerContent; +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ var promptResults = update.GetRequestContentFilterResult(); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.Violence.Severity); + Assert.Equal(ContentFilterSeverity.Safe, promptResults.SelfHarm.Severity); + Assert.False(promptResults.Jailbreak.Detected); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + Assert.Equal("Test chat streaming response", message.Content); + Assert.Equal("ToolCalls", message.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + Assert.Equal("ToolCalls", message.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + Assert.Equal("ToolCalls", message.Metadata?["FinishReason"]); await enumerator.MoveNextAsync(); - Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]); + message = enumerator.Current; + Assert.Equal("ToolCalls", message.Metadata?["FinishReason"]); + + // Async Filter Final Chunks + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + + var filterResults = update.GetResponseContentFilterResult(); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Hate.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Sexual.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.SelfHarm.Severity); + Assert.Equal(ContentFilterSeverity.Safe, filterResults.Violence.Severity); + + await enumerator.MoveNextAsync(); + message = enumerator.Current; + + Assert.IsType(message.InnerContent); + update = (StreamingChatCompletionUpdate)message.InnerContent; + filterResults = update.GetResponseContentFilterResult(); + Assert.False(filterResults.ProtectedMaterialCode.Detected); + 
Assert.False(filterResults.ProtectedMaterialText.Detected); // Keep looping until the end of stream while (await enumerator.MoveNextAsync()) @@ -518,6 +652,7 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() } Assert.Equal(2, functionCallCount); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. } [Fact] @@ -539,24 +674,31 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvo kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var responses = new List(); - for (var i = 0; i < ModelResponsesCount; i++) + try { - responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }); - } + for (var i = 0; i < ModelResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") }); + } - this._messageHandlerStub.ResponsesToReturn = responses; + this._messageHandlerStub.ResponsesToReturn = responses; - // Act & Assert - await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + 
Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + } + finally { - Assert.Equal("Test chat streaming response", chunk.Content); + responses.ForEach(r => r.Dispose()); } - - Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); } [Fact] @@ -578,10 +720,10 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() kernel.Plugins.Add(plugin); var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -591,7 +733,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() // Function Tool Call Streaming (One Chunk) await enumerator.MoveNextAsync(); Assert.Equal("Test chat streaming response", enumerator.Current.Content); - Assert.Equal("tool_calls", 
enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); // Chat Completion Streaming (1st Chunk) await enumerator.MoveNextAsync(); @@ -599,7 +741,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() // Chat Completion Streaming (2nd Chunk) await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); Assert.Equal(1, functionCallCount); @@ -629,12 +771,13 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync() const string SystemMessage = "This is test system message"; var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); IKernelBuilder builder = Kernel.CreateBuilder(); builder.Services.AddTransient((sp) => service); @@ -673,12 +816,13 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS const string CollectionItemPrompt = "This is collection item prompt"; var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + var settings = new 
AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage(Prompt); @@ -727,17 +871,18 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Fake prompt"); - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act var result = await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -789,10 +934,11 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT public async Task 
FunctionCallsShouldBeReturnedToLLMAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -807,7 +953,7 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync() new ChatMessageContent(AuthorRole.Assistant, items) ]; - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -847,10 +993,11 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync() public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -866,7 +1013,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn ]) }; - var settings = new 
OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -895,10 +1042,11 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync() { // Arrange - this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK) + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) - }); + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); @@ -911,7 +1059,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage ]) }; - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; // Act await sut.GetChatMessageContentAsync(chatHistory, settings); @@ -936,6 +1084,295 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); } + [Fact] + public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. 
+ context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. + + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) }; + this._messageHandlerStub.ResponsesToReturn.Add(firstResponse); + + using var secondResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponsesToReturn.Add(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = 
optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. 
+ + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + this._messageHandlerStub.ResponsesToReturn.Add(firstResponse); + + using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + this._messageHandlerStub.ResponsesToReturn.Add(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await foreach (var update in sut.GetStreamingChatMessageContentsAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel)) + { + } + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var 
userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); + + 
var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + // Act + await sut.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("auto", optionsJson.GetProperty("tool_choice").ToString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNoneFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var chatCompletion = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var 
actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", optionsJson.GetProperty("tool_choice").ToString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingRequiredFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required() }; + + // Act + await sut.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", 
optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("required", optionsJson.GetProperty("tool_choice").ToString()); + } + + [Fact] + public async Task ItDoesNotChangeDefaultsForToolsAndChoiceIfNeitherOfFunctionCallingConfigurationsSetAsync() + { + // Arrange + var kernel = new Kernel(); + + var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient); + + using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) + }; + this._messageHandlerStub.ResponsesToReturn.Add(responseMessage); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings(); // Neither ToolCallBehavior nor FunctionChoiceBehavior is set. 
+ + // Act + await sut.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.False(optionsJson.TryGetProperty("tools", out var _)); + Assert.False(optionsJson.TryGetProperty("tool_choice", out var _)); + } + public void Dispose() { this._httpClient.Dispose(); @@ -950,10 +1387,27 @@ public void Dispose() public static TheoryData ResponseFormats => new() { - { new FakeChatCompletionsResponseFormat(), null }, { "json_object", "json_object" }, { "text", "text" } }; - private sealed class FakeChatCompletionsResponseFormat : ChatCompletionsResponseFormat; + private sealed class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter + { + private readonly Func, Task> _callback; + + public AutoFunctionInvocationFilter(Func, Task> callback) + { + this._callback = callback; + } + + public AutoFunctionInvocationFilter(Action> callback) + { + this._callback = (c, n) => { callback(c, n); return Task.CompletedTask; }; + } + + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + await this._callback(context, next); + } + } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..4e8a12b9b69b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,103 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public class AzureOpenAITextEmbeddingGenerationServiceTests +{ + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextEmbeddingGenerationServiceTests() + { + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected(bool includeLoggerFactory) + { + // Arrange + var sut = includeLoggerFactory ? + new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", modelId: "model", dimensions: 2, loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", modelId: "model", dimensions: 2); + var sutWithAzureOpenAIClient = new AzureOpenAITextEmbeddingGenerationService("deployment-name", new AzureOpenAIClient(new Uri("https://endpoint"), new ApiKeyCredential("apiKey")), modelId: "model", dimensions: 2, loggerFactory: this._mockLoggerFactory.Object); + + // Assert + Assert.NotNull(sut); + Assert.NotNull(sutWithAzureOpenAIClient); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("model", sutWithAzureOpenAIClient.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty() + { + // Arrange + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key"); + + // Act + var result = await sut.GenerateEmbeddingsAsync([], null, 
CancellationToken.None); + + // Assert + Assert.Empty(result); + } + + [Fact] + public async Task GetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", httpClient: client); + + // Act + var result = await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None); + + // Assert + Assert.Single(result); + Assert.Equal(4, result[0].Length); + } + + [Fact] + public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-multiple-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", httpClient: client); + + // Act & Assert + await Assert.ThrowsAsync(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None)); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs new file mode 100644 index 000000000000..c087b7a28d41 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToAudioServiceTests.cs @@ -0,0 +1,215 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Moq; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextToAudioServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextToAudioServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorsAddRequiredMetadata(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? 
+ new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : + new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id"); + + // Assert + Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("deployment-name", service.Attributes["DeploymentName"]); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new AzureOpenAITextToAudioService(null!, "https://endpoint", "api-key")); + Assert.Throws(() => new AzureOpenAITextToAudioService("", "https://endpoint", "api-key")); + Assert.Throws(() => new AzureOpenAITextToAudioService(" ", "https://endpoint", "api-key")); + } + + [Fact] + public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync() + { + // Arrange + var settingsWithInvalidVoice = new OpenAITextToAudioExecutionSettings(""); + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + await Assert.ThrowsAsync(() => service.GetAudioContentsAsync("Some text", settingsWithInvalidVoice)); + } + + [Fact] + public async Task GetAudioContentByDefaultWorksCorrectlyAsync() + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", 
new OpenAITextToAudioExecutionSettings("Nova")); + + // Assert + var audioData = result[0].Data!.Value; + Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Theory] + [InlineData("echo", "wav")] + [InlineData("fable", "opus")] + [InlineData("onyx", "flac")] + [InlineData("nova", "aac")] + [InlineData("shimmer", "pcm")] + public async Task GetAudioContentVoicesWorksCorrectlyAsync(string voice, string format) + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings(voice) { ResponseFormat = format }); + + // Assert + var requestBody = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent!); + Assert.NotNull(requestBody); + Assert.Equal(voice, requestBody["voice"]?.ToString()); + Assert.Equal(format, requestBody["response_format"]?.ToString()); + + var audioData = result[0].Data!.Value; + Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Fact] + public async Task GetAudioContentThrowsWhenVoiceIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice"))); + } + + [Fact] + public async Task GetAudioContentThrowsWhenFormatIsNotSupportedAsync() + { + // Arrange + byte[] 
expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings() { ResponseFormat = "not supported" })); + } + + [Theory] + [InlineData(true, "http://local-endpoint")] + [InlineData(false, "https://endpoint")] + public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + if (useHttpClientBaseAddress) + { + this._httpClient.BaseAddress = new Uri("http://local-endpoint/path"); + } + + var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint/path", "api-key", "model-id", this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("Nova")); + + // Assert + Assert.StartsWith(expectedBaseAddress, this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); + } + + [Theory] + [InlineData("model-1", "model-2", "deployment", "model-2")] + [InlineData("model-1", null, "deployment", "model-1")] + [InlineData(null, "model-2", "deployment", "model-2")] + [InlineData(null, null, "deployment", "deployment")] + public async Task GetAudioContentPrioritizesModelIdOverDeploymentNameAsync(string? modelInSettings, string? 
modelInConstructor, string deploymentName, string expectedModel) + { + // Arrange + var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + + var service = new AzureOpenAITextToAudioService(deploymentName, "https://endpoint", "api-key", modelInConstructor, this._httpClient); + await using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("Nova") { ModelId = modelInSettings }); + + // Assert + var requestBody = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent!); + Assert.Equal(expectedModel, requestBody?["model"]?.ToString()); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs new file mode 100644 index 000000000000..5e8b452d5913 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAITextToImageServiceTests.cs @@ -0,0 +1,345 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.IO; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToImage; +using Moq; +using OpenAI.Images; + +#pragma warning disable CS0618 // Type or member is obsolete + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public sealed class AzureOpenAITextToImageServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public AzureOpenAITextToImageServiceTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-response.json")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Fact] + public void ConstructorsAddRequiredMetadata() + { + // Case #1 + var sut = new AzureOpenAITextToImageService("deployment", "https://api-host/", "api-key", "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + + // Case #2 + sut = new AzureOpenAITextToImageService("deployment", "https://api-hostapi/", new Mock().Object, "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + + // Case #3 
+ sut = new AzureOpenAITextToImageService("deployment", new AzureOpenAIClient(new Uri("https://api-host/"), new ApiKeyCredential("api-key")), "model", loggerFactory: this._mockLoggerFactory.Object); + Assert.Equal("deployment", sut.Attributes[AzureClientCore.DeploymentNameKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(256, 256, "dall-e-2")] + [InlineData(512, 512, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-3")] + [InlineData(1024, 1792, "dall-e-3")] + [InlineData(1792, 1024, "dall-e-3")] + [InlineData(123, 321, "custom-model-1")] + [InlineData(179, 124, "custom-model-2")] + public async Task GenerateImageWorksCorrectlyAsync(int width, int height, string modelId) + { + // Arrange + var sut = new AzureOpenAITextToImageService("deployment", "https://api-host", "api-key", modelId, this._httpClient, loggerFactory: this._mockLoggerFactory.Object); + + // Act + var result = await sut.GenerateImageAsync("description", width, height); + + // Assert + Assert.Equal("https://image-url/", result); + + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); // {"prompt":"description","model":"deployment","response_format":"url","size":"179x124"} + Assert.NotNull(request); + Assert.Equal("description", request["prompt"]?.ToString()); + Assert.Equal("deployment", request["model"]?.ToString()); + Assert.Null(request["response_format"]); + Assert.Equal($"{width}x{height}", request["size"]?.ToString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItShouldUseProvidedEndpoint(bool useTokeCredential) + { + // Arrange + var sut = useTokeCredential ? 
+ new AzureOpenAITextToImageService("deployment", endpoint: "https://api-host", new Mock().Object, "dall-e-3", this._httpClient) : + new AzureOpenAITextToImageService("deployment", endpoint: "https://api-host", "api-key", "dall-e-3", this._httpClient); + + // Act + var result = await sut.GenerateImageAsync("description", 1024, 1024); + + // Assert + Assert.StartsWith("https://api-host", this._messageHandlerStub.RequestUri?.AbsoluteUri); + } + + [Theory] + [InlineData(true, "")] + [InlineData(true, null)] + [InlineData(false, "")] + [InlineData(false, null)] + public async Task ItShouldUseHttpClientUriIfNoEndpointProvided(bool useTokeCredential, string? endpoint) + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + + var sut = useTokeCredential ? + new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, new Mock().Object, "dall-e-3", this._httpClient) : + new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, "api-key", "dall-e-3", this._httpClient); + + // Act + var result = await sut.GenerateImageAsync("description", 1024, 1024); + + // Assert + Assert.StartsWith("https://api-host", this._messageHandlerStub.RequestUri?.AbsoluteUri); + } + + [Theory] + [InlineData(true, "")] + [InlineData(true, null)] + [InlineData(false, "")] + [InlineData(false, null)] + public void ItShouldThrowExceptionIfNoEndpointProvided(bool useTokeCredential, string? 
endpoint) + { + // Arrange + this._httpClient.BaseAddress = null; + + // Act & Assert + if (useTokeCredential) + { + Assert.Throws(() => new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, new Mock().Object, "dall-e-3", this._httpClient)); + } + else + { + Assert.Throws(() => new AzureOpenAITextToImageService("deployment", endpoint: endpoint!, "api-key", "dall-e-3", this._httpClient)); + } + } + + [Theory] + [InlineData(null, null)] + [InlineData("uri", "url")] + [InlineData("url", "url")] + [InlineData("GeneratedImage.Uri", "url")] + [InlineData("bytes", "b64_json")] + [InlineData("b64_json", "b64_json")] + [InlineData("GeneratedImage.Bytes", "b64_json")] + public async Task GetUriImageContentsResponseFormatRequestWorksCorrectlyAsync(string? responseFormatOption, string? expectedResponseFormat) + { + // Arrange + object? responseFormatObject = null; + + switch (responseFormatOption) + { + case "GeneratedImage.Uri": responseFormatObject = GeneratedImageFormat.Uri; break; + case "GeneratedImage.Bytes": responseFormatObject = GeneratedImageFormat.Bytes; break; + default: responseFormatObject = responseFormatOption; break; + } + + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { ResponseFormat = responseFormatObject }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedResponseFormat is not null) + { + Assert.Contains($"\"response_format\":\"{expectedResponseFormat}\"", requestBody); + } + else + { + // Then no response format is provided, it should not be included in the request body + Assert.DoesNotContain("response_format", requestBody); + 
} + } + + [Theory] + [InlineData(null, null)] + [InlineData("hd", "hd")] + [InlineData("high", "hd")] + [InlineData("standard", "standard")] + public async Task GetUriImageContentsImageQualityRequestWorksCorrectlyAsync(string? quality, string? expectedQuality) + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { Quality = quality }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedQuality is not null) + { + Assert.Contains($"\"quality\":\"{expectedQuality}\"", requestBody); + } + else + { + // Then no quality is provided, it should not be included in the request body + Assert.DoesNotContain("quality", requestBody); + } + } + + [Theory] + [InlineData(null, null)] + [InlineData("vivid", "vivid")] + [InlineData("natural", "natural")] + public async Task GetUriImageContentsImageStyleRequestWorksCorrectlyAsync(string? style, string? 
expectedStyle) + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { Style = style }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedStyle is not null) + { + Assert.Contains($"\"style\":\"{expectedStyle}\"", requestBody); + } + else + { + // Then no style is provided, it should not be included in the request body + Assert.DoesNotContain("style", requestBody); + } + } + + [Theory] + [InlineData(null, null, null)] + [InlineData(1, 2, "1x2")] + public async Task GetUriImageContentsImageSizeRequestWorksCorrectlyAsync(int? width, int? height, string? expectedSize) + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings + { + Size = width.HasValue && height.HasValue + ? 
(width.Value, height.Value) + : null + }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedSize is not null) + { + Assert.Contains($"\"size\":\"{expectedSize}\"", requestBody); + } + else + { + // Then no size is provided, it should not be included in the request body + Assert.DoesNotContain("size", requestBody); + } + } + + [Fact] + public async Task GetByteImageContentsResponseWorksCorrectlyAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-b64_json-format-response.json")) + }; + + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { ResponseFormat = "b64_json" }); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + var imageContent = result[0]; + Assert.NotNull(imageContent); + Assert.True(imageContent.CanRead); + Assert.Equal("image/png", imageContent.MimeType); + Assert.NotNull(imageContent.InnerContent); + Assert.IsType(imageContent.InnerContent); + + var breakingGlass = imageContent.InnerContent as GeneratedImage; + Assert.Equal("my prompt", breakingGlass!.RevisedPrompt); + } + + [Fact] + public async Task GetUrlImageContentsResponseWorksCorrectlyAsync() + { + // Arrange + this._httpClient.BaseAddress = new Uri("https://api-host"); + var sut = new AzureOpenAITextToImageService("deployment", endpoint: null!, credential: new Mock().Object, "dall-e-3", this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { 
ResponseFormat = "url" }); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + var imageContent = result[0]; + Assert.NotNull(imageContent); + Assert.False(imageContent.CanRead); + Assert.Equal(new Uri("https://image-url/"), imageContent.Uri); + Assert.NotNull(imageContent.InnerContent); + Assert.IsType(imageContent.InnerContent); + + var breakingGlass = imageContent.InnerContent as GeneratedImage; + Assert.Equal("my prompt", breakingGlass!.RevisedPrompt); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..d8ff5b1e0d79 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs @@ -0,0 +1,311 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +#pragma warning disable CS0618 // Type or member is obsolete + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings; + +/// +/// Unit tests for class. 
+/// +public class AzureOpenAIPromptExecutionSettingsTests +{ + [Fact] + public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults() + { + // Arrange + var maxTokensSettings = 128; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(null, maxTokensSettings); + + // Assert + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.FrequencyPenalty); + Assert.Null(executionSettings.PresencePenalty); + Assert.Null(executionSettings.StopSequences); + Assert.Null(executionSettings.TokenSelectionBiases); + Assert.Null(executionSettings.TopLogprobs); + Assert.Null(executionSettings.Logprobs); + Assert.Null(executionSettings.AzureChatDataSource); + Assert.Equal(maxTokensSettings, executionSettings.MaxTokens); + } + + [Fact] + public void ItUsesExistingOpenAIExecutionSettings() + { + // Arrange + AzureOpenAIPromptExecutionSettings actualSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + MaxTokens = 128, + Logprobs = true, + TopLogprobs = 5, + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + Assert.Equal(actualSettings, executionSettings); + } + + [Fact] + public void ItCanUseOpenAIExecutionSettings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() { + { "max_tokens", 1000 }, + { "temperature", 0 } + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + Assert.NotNull(executionSettings); + Assert.Equal(1000, executionSettings.MaxTokens); + 
Assert.Equal(0, executionSettings.Temperature); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", 0.7 }, + { "frequency_penalty", 0.7 }, + { "presence_penalty", 0.7 }, + { "stop_sequences", new [] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "max_tokens", 128 }, + { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } }, + { "seed", 123456 }, + { "logprobs", true }, + { "top_logprobs", 5 }, + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings() + { + // Arrange + PromptExecutionSettings actualSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", "0.7" }, + { "top_p", "0.7" }, + { "frequency_penalty", "0.7" }, + { "presence_penalty", "0.7" }, + { "stop_sequences", new [] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "max_tokens", "128" }, + { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } }, + { "seed", 123456 }, + { "logprobs", true }, + { "top_logprobs", 5 } + } + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings, null); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase() + { + // Arrange + var json = """ + { + "temperature": 0.7, + "top_p": 0.7, + "frequency_penalty": 0.7, + "presence_penalty": 0.7, + "stop_sequences": [ "foo", "bar" ], + "chat_system_prompt": "chat system prompt", + "token_selection_biases": { "1": 2, "3": 4 }, 
+ "max_tokens": 128, + "seed": 123456, + "logprobs": true, + "top_logprobs": 5 + } + """; + var actualSettings = JsonSerializer.Deserialize(json); + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(actualSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Theory] + [InlineData("", "")] + [InlineData("System prompt", "System prompt")] + public void ItUsesCorrectChatSystemPrompt(string chatSystemPrompt, string expectedChatSystemPrompt) + { + // Arrange & Act + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = chatSystemPrompt }; + + // Assert + Assert.Equal(expectedChatSystemPrompt, settings.ChatSystemPrompt); + } + + [Fact] + public void PromptExecutionSettingsCloneWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0 + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Act + var clone = executionSettings!.Clone(); + + // Assert + Assert.Equal(executionSettings.ModelId, clone.ModelId); + Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + } + + [Fact] + public void PromptExecutionSettingsFreezeWorksAsExpected() + { + // Arrange + string configPayload = """ + { + "max_tokens": 60, + "temperature": 0.5, + "top_p": 0.0, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "stop_sequences": [ "DONE" ], + "token_selection_biases": { "1": 2, "3": 4 } + } + """; + var executionSettings = JsonSerializer.Deserialize(configPayload); + + // Act + executionSettings!.Freeze(); + + // Assert + Assert.True(executionSettings.IsFrozen); + Assert.Throws(() => executionSettings.ModelId = "gpt-4"); + Assert.Throws(() => executionSettings.Temperature = 1); + Assert.Throws(() => executionSettings.TopP = 1); + Assert.Throws(() => executionSettings.StopSequences?.Add("STOP")); + 
Assert.Throws(() => executionSettings.TokenSelectionBiases?.Add(5, 6)); + + executionSettings!.Freeze(); // idempotent + Assert.True(executionSettings.IsFrozen); + } + + [Fact] + public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences() + { + // Arrange + var executionSettings = new AzureOpenAIPromptExecutionSettings { StopSequences = [] }; + + // Act + var executionSettingsWithData = AzureOpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); + + // Assert + Assert.Null(executionSettingsWithData.StopSequences); + } + + [Fact] + public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings() + { + // Arrange + OpenAIPromptExecutionSettings originalSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + MaxTokens = 128, + Logprobs = true, + Seed = 123456, + TopLogprobs = 5, + ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions + }; + + // Act + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItRestoresOriginalFunctionChoiceBehavior() + { + // Arrange + var functionChoiceBehavior = FunctionChoiceBehavior.Auto(); + + var originalExecutionSettings = new PromptExecutionSettings(); + originalExecutionSettings.FunctionChoiceBehavior = functionChoiceBehavior; + + // Act + var result = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalExecutionSettings); + + // Assert + Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior); + } + + private static void AssertExecutionSettings(AzureOpenAIPromptExecutionSettings executionSettings) + { + Assert.NotNull(executionSettings); + Assert.Equal(0.7, 
executionSettings.Temperature); + Assert.Equal(0.7, executionSettings.TopP); + Assert.Equal(0.7, executionSettings.FrequencyPenalty); + Assert.Equal(0.7, executionSettings.PresencePenalty); + Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); + Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); + Assert.Equal(128, executionSettings.MaxTokens); + Assert.Equal(123456, executionSettings.Seed); + Assert.Equal(true, executionSettings.Logprobs); + Assert.Equal(5, executionSettings.TopLogprobs); + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..b4bfd8634808 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -0,0 +1,82 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Azure.AI.OpenAI.Chat; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; + +namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings; + +/// +/// Unit tests for class. 
+/// +public class OpenAIPromptExecutionSettingsTests +{ + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromAzureOpenAIPromptExecutionSettings() + { + // Arrange + AzureOpenAIPromptExecutionSettings originalSettings = new() + { + Temperature = 0.7, + TopP = 0.7, + FrequencyPenalty = 0.7, + PresencePenalty = 0.7, + StopSequences = new string[] { "foo", "bar" }, + ChatSystemPrompt = "chat system prompt", + TokenSelectionBiases = new Dictionary() { { 1, 2 }, { 3, 4 } }, + MaxTokens = 128, + Logprobs = true, + Seed = 123456, + TopLogprobs = 5, +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + AzureChatDataSource = new AzureSearchChatDataSource + { + Endpoint = new Uri("https://test-host"), + Authentication = DataSourceAuthentication.FromApiKey("api-key"), + IndexName = "index-name" + } +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItRestoresOriginalFunctionChoiceBehavior() + { + // Arrange + var functionChoiceBehavior = FunctionChoiceBehavior.Auto(); + + var originalExecutionSettings = new PromptExecutionSettings(); + originalExecutionSettings.FunctionChoiceBehavior = functionChoiceBehavior; + + // Act + var result = OpenAIPromptExecutionSettings.FromExecutionSettings(originalExecutionSettings); + + // Assert + Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior); + } + + private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) + { + Assert.NotNull(executionSettings); + Assert.Equal(0.7, executionSettings.Temperature); + Assert.Equal(0.7, executionSettings.TopP); + Assert.Equal(0.7, executionSettings.FrequencyPenalty); + Assert.Equal(0.7, executionSettings.PresencePenalty); + Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); + Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); + Assert.Equal(128, executionSettings.MaxTokens); + Assert.Equal(123456, executionSettings.Seed); + Assert.Equal(true, executionSettings.Logprobs); + Assert.Equal(5, executionSettings.TopLogprobs); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_multiple_function_calls_test_response.json rename to 
dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_single_function_call_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt new file mode 100644 index 000000000000..078ad45af412 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_async_filter_response.txt @@ -0,0 +1,13 @@ +data: {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"jailbreak":{"filtered":false,"detected":false},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]} + +data: {"choices":[{"delta":{"content":"","role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"delta":{"content":"Kindness"},"finish_reason":null,"index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: 
{"choices":[{"delta":{},"finish_reason":"stop","index":0,"logprobs":null}],"created":1724860848,"id":"chatcmpl-123","model":"gpt-4o-2024-05-13","object":"chat.completion.chunk","system_fingerprint":"fp_abc28019ad"} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"protected_material_code":{"filtered":false,"detected":false},"protected_material_text":{"filtered":false,"detected":false}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: [DONE] \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_async_filter_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_async_filter_response.txt new file mode 100644 index 000000000000..ef809b40f5e7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_async_filter_response.txt @@ -0,0 +1,15 @@ +data: {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"jailbreak":{"filtered":false,"detected":false},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]} + +data: 
{"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin-NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin-InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]} + +data: {"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: 
{"choices":[{"content_filter_offsets":{"check_offset":1576,"start_offset":1576,"end_offset":2318},"content_filter_results":{"protected_material_code":{"filtered":false,"detected":false},"protected_material_text":{"filtered":false,"detected":false}},"finish_reason":null,"index":0}],"created":0,"id":"","model":"","object":""} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_single_function_call_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt diff --git 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/chat_completion_with_data_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json similarity index 92% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json index eb695f292c96..3ffa6b00cc3f 100644 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_multiple_function_calls_test_response.json +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json @@ -11,7 +11,7 @@ "content": null, "tool_calls": [ { - "id": "tool-call-id-1", + "id": "1", "type": "function", "function": { "name": "MyPlugin-Function1", @@ -19,7 +19,7 @@ } }, { - "id": "tool-call-id-2", + "id": "2", "type": "function", "function": { "name": "MyPlugin-Function2", diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..c8aeb98e8b82 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt 
b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt new file mode 100644 index 000000000000..46a9581cf0cc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt @@ -0,0 +1,20 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + }, + { + "object": "embedding", + "index": 1, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt new file mode 100644 index 000000000000..c715b851b78c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-embeddings-response.txt @@ -0,0 +1,15 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json new file mode 100644 index 000000000000..e004607fa8f0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json @@ -0,0 +1,9 @@ +{ + "created": 1726234481, + "data": [ + { + "b64_json": "iVBORw0KGgoAAA==", + "revised_prompt": "my prompt" + } + ] +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.json new file mode 100644 
index 000000000000..8fd01a13c7ac --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text-to-image-response.json @@ -0,0 +1,9 @@ +{ + "created": 1702575371, + "data": [ + { + "revised_prompt": "my prompt", + "url": "https://image-url/" + } + ] +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_streaming_test_response.txt rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_streaming_test_response.txt diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json similarity index 100% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/text_completion_test_response.json rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/TestData/text_completion_test_response.json diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/CompatibilitySuppressions.xml b/dotnet/src/Connectors/Connectors.AzureOpenAI/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..58f8c8c61ae4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/CompatibilitySuppressions.xml @@ -0,0 +1,32 @@ +๏ปฟ + + + + CP0002 + M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAIPromptExecutionSettings.get_AzureChatDataSource + lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + true + + + CP0002 + 
M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + lib/net8.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAIPromptExecutionSettings.get_AzureChatDataSource + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Connectors.AzureOpenAI.AzureOpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.AzureOpenAI.dll + true + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj new file mode 100644 index 000000000000..15d88496159b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Connectors.AzureOpenAI.csproj @@ -0,0 +1,38 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Connectors.AzureOpenAI + $(AssemblyName) + net8.0;netstandard2.0 + true + $(NoWarn);NU5104;SKEXP0001,SKEXP0010 + true + + + + rc + + + + + + + + Semantic Kernel - Azure OpenAI connectors + Semantic Kernel connectors for Azure OpenAI. Contains clients for chat completion, embedding and DALL-E text to image. 
+ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs new file mode 100644 index 000000000000..6627b7482fae --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs @@ -0,0 +1,95 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Azure.AI.OpenAI.Chat; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Diagnostics; +using OpenAI.Chat; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with Azure OpenAI services. +/// +internal partial class AzureClientCore +{ + /// + protected override OpenAIPromptExecutionSettings GetSpecializedExecutionSettings(PromptExecutionSettings? executionSettings) + => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + /// + protected override Activity? StartCompletionActivity(ChatHistory chatHistory, PromptExecutionSettings settings) + => ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentName, ModelProvider, chatHistory, settings); + + /// + protected override ChatCompletionOptions CreateChatCompletionOptions( + OpenAIPromptExecutionSettings executionSettings, + ChatHistory chatHistory, + ToolCallingConfig toolCallingConfig, + Kernel? 
kernel) + { + if (executionSettings is not AzureOpenAIPromptExecutionSettings azureSettings) + { + return base.CreateChatCompletionOptions(executionSettings, chatHistory, toolCallingConfig, kernel); + } + + var options = new ChatCompletionOptions + { + MaxOutputTokenCount = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + TopP = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, +#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + Seed = executionSettings.Seed, +#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + EndUserId = executionSettings.User, + TopLogProbabilityCount = executionSettings.TopLogprobs, + IncludeLogProbabilities = executionSettings.Logprobs, + }; + + var responseFormat = GetResponseFormat(executionSettings); + if (responseFormat is not null) + { + options.ResponseFormat = responseFormat; + } + + if (toolCallingConfig.Choice is not null) + { + options.ToolChoice = toolCallingConfig.Choice; + } + + if (toolCallingConfig.Tools is { Count: > 0 } tools) + { + options.Tools.AddRange(tools); + } + + if (azureSettings.AzureChatDataSource is not null) + { +#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + options.AddDataSource(azureSettings.AzureChatDataSource); +#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ } + + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.LogitBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + return options; + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs new file mode 100644 index 000000000000..820c87c023e8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.cs @@ -0,0 +1,146 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Net.Http; +using System.Threading; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +#pragma warning disable IDE0005 // Using directive is unnecessary +using Microsoft.SemanticKernel.Connectors.FunctionCalling; +#pragma warning restore IDE0005 // Using directive is unnecessary +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Http; +using OpenAI; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with Azure OpenAI services. +/// +internal partial class AzureClientCore : ClientCore +{ + /// + /// Gets the key used to store the deployment name in the dictionary. + /// + internal static string DeploymentNameKey => "DeploymentName"; + + /// + /// Deployment name. + /// + internal string DeploymentName { get; set; } = string.Empty; + + /// + /// Initializes a new instance of the class. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal AzureClientCore( + string deploymentName, + string endpoint, + string apiKey, + HttpClient? httpClient = null, + ILogger? logger = null) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); + Verify.NotNullOrWhiteSpace(apiKey); + + var options = GetAzureOpenAIClientOptions(httpClient); + + this.Logger = logger ?? NullLogger.Instance; + this.DeploymentName = deploymentName; + this.Endpoint = new Uri(endpoint); + this.Client = new AzureOpenAIClient(this.Endpoint, new ApiKeyCredential(apiKey), options); + this.FunctionCallsProcessor = new FunctionCallsProcessor(this.Logger); + + this.AddAttribute(DeploymentNameKey, deploymentName); + } + + /// + /// Initializes a new instance of the class. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal AzureClientCore( + string deploymentName, + string endpoint, + TokenCredential credential, + HttpClient? httpClient = null, + ILogger? 
logger = null) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); + + var options = GetAzureOpenAIClientOptions(httpClient); + + this.Logger = logger ?? NullLogger.Instance; + this.DeploymentName = deploymentName; + this.Endpoint = new Uri(endpoint); + this.Client = new AzureOpenAIClient(this.Endpoint, credential, options); + this.FunctionCallsProcessor = new FunctionCallsProcessor(this.Logger); + + this.AddAttribute(DeploymentNameKey, deploymentName); + } + + /// + /// Initializes a new instance of the class.. + /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal AzureClientCore( + string deploymentName, + AzureOpenAIClient openAIClient, + ILogger? logger = null) + { + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNull(openAIClient); + + this.Logger = logger ?? NullLogger.Instance; + this.DeploymentName = deploymentName; + this.Client = openAIClient; + this.FunctionCallsProcessor = new FunctionCallsProcessor(this.Logger); + + this.AddAttribute(DeploymentNameKey, deploymentName); + } + + /// Gets options to use for an OpenAIClient + /// Custom for HTTP requests. + /// Optional API version. + /// An instance of . + internal static AzureOpenAIClientOptions GetAzureOpenAIClientOptions(HttpClient? httpClient, AzureOpenAIClientOptions.ServiceVersion? serviceVersion = null) + { + AzureOpenAIClientOptions options = serviceVersion is not null + ? 
new(serviceVersion.Value) { UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent } + : new() { UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent }; + + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureClientCore))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. + options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout + } + + return options; + } + + /// + protected override string GetClientModelId() + => this.DeploymentName; +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs new file mode 100644 index 000000000000..dd69d995b62e --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIKernelBuilderExtensions.cs @@ -0,0 +1,561 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure OpenAI connectors. +/// +public static class AzureOpenAIKernelBuilderExtensions +{ + #region Chat Completion + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new ApiKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IKernelBuilder AddAzureOpenAIChatCompletion( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + #region Text Embedding + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. 
+ /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNull(credential); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + azureOpenAIClient ?? 
serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToAudio( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credential); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToAudioService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Text-to-Audio + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToAudio( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToAudioService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Images + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + credentials, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + apiVersion)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + apiVersion)); + + return builder; + } + + /// + /// Adds the to the . 
+ /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAITextToImage( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + azureOpenAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new ApiKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null, + HttpClient? 
httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddAzureOpenAIAudioToText( + this IKernelBuilder builder, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, ApiKeyCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? 
httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs new file mode 100644 index 000000000000..37188d4a34aa --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Extensions/AzureOpenAIServiceCollectionExtensions.cs @@ -0,0 +1,496 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Provides extension methods for to configure Azure OpenAI connectors. +/// +public static class AzureOpenAIServiceCollectionExtensions +{ + #region Chat Completion + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new ApiKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . 
+ public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(serviceProvider)); + + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + public static IServiceCollection AddAzureOpenAIChatCompletion( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, azureOpenAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + #endregion + + #region Text Embedding + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credential, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNull(credential); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + endpoint, + credential, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? azureOpenAIClient = null, + string? serviceId = null, + string? modelId = null, + int? dimensions = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextEmbeddingGenerationService( + deploymentName, + azureOpenAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService(), + dimensions)); + } + + #endregion + + #region Text-to-Audio + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToAudio( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToAudioService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + #endregion + + #region Images + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Azure OpenAI API version + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? modelId = null, + string? serviceId = null, + string? apiVersion = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + credentials, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + apiVersion)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Maximum number of attempts to retrieve the text to image operation result. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? modelId = null, + int maxRetryCount = 5) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + endpoint, + apiKey, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAITextToImage( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new AzureOpenAITextToImageService( + deploymentName, + openAIClient ?? serviceProvider.GetRequiredService(), + modelId, + serviceProvider.GetService())); + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + string endpoint, + string apiKey, + string? serviceId = null, + string? 
modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + new ApiKeyCredential(apiKey), + HttpClientProvider.GetHttpClient(serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + string endpoint, + TokenCredential credentials, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + Verify.NotNullOrWhiteSpace(endpoint); + Verify.NotNull(credentials); + + Func factory = (serviceProvider, _) => + { + AzureOpenAIClient client = CreateAzureOpenAIClient( + endpoint, + credentials, + HttpClientProvider.GetHttpClient(serviceProvider)); + return new(deploymentName, client, modelId, serviceProvider.GetService()); + }; + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddAzureOpenAIAudioToText( + this IServiceCollection services, + string deploymentName, + AzureOpenAIClient? openAIClient = null, + string? serviceId = null, + string? modelId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(deploymentName); + + Func factory = (serviceProvider, _) => + new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + #endregion + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, ApiKeyCredential credentials, HttpClient? httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); + + private static AzureOpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? 
httpClient) => + new(new Uri(endpoint), credentials, AzureClientCore.GetAzureOpenAIClientOptions(httpClient)); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs index 2e065876b779..b8dfccdf06bf 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/AzureOpenAIAudioToTextService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIAudioToTextService.cs @@ -11,22 +11,22 @@ using Microsoft.SemanticKernel.AudioToText; using Microsoft.SemanticKernel.Services; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI audio-to-text service. /// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class AzureOpenAIAudioToTextService : IAudioToTextService { /// Core implementation shared by Azure OpenAI services. - private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -42,12 +42,12 @@ public AzureOpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? 
loggerFactory = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates an instance of the with AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -63,25 +63,25 @@ public AzureOpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates an instance of the using the specified . + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . + /// Custom . /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. public AzureOpenAIAudioToTextService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? 
loggerFactory = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIAudioToTextService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// @@ -90,5 +90,5 @@ public Task> GetTextContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetTextContentFromAudioAsync(content, executionSettings, cancellationToken); + => this._client.GetTextFromAudioContentsAsync(this._client.DeploymentName, content, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs similarity index 72% rename from dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs index 04da5d2dc1e3..47cca54662bc 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/AzureOpenAIChatCompletionService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAIChatCompletionService.cs @@ -11,7 +11,7 @@ using Microsoft.SemanticKernel.Services; using Microsoft.SemanticKernel.TextGeneration; -namespace Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI chat completion service. @@ -19,10 +19,10 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public sealed class AzureOpenAIChatCompletionService : IChatCompletionService, ITextGenerationService { /// Core implementation shared by Azure OpenAI clients. 
- private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; /// - /// Create an instance of the connector with API key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -38,13 +38,13 @@ public AzureOpenAIChatCompletionService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Create an instance of the connector with AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -60,43 +60,43 @@ public AzureOpenAIChatCompletionService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, endpoint, credentials, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - /// Creates a new client instance using the specified . + /// Initializes a new instance of the class. 
/// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . + /// Custom . /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. public AzureOpenAIChatCompletionService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? loggerFactory = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAIChatCompletionService))); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + => this._client.GetChatMessageContentsAsync(this._client.DeploymentName, chatHistory, executionSettings, kernel, cancellationToken); /// public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + => this._client.GetStreamingChatMessageContentsAsync(this._client.DeploymentName, chatHistory, executionSettings, kernel, cancellationToken); /// public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + => this._client.GetChatAsTextContentsAsync(this._client.DeploymentName, prompt, executionSettings, kernel, cancellationToken); /// public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); + => this._client.GetChatAsTextStreamingContentsAsync(this._client.DeploymentName, prompt, executionSettings, kernel, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs rename to dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs index 63fbdbdccb2b..bcbcfbb67087 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationService.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextEmbeddingGenerationService.cs @@ -12,7 +12,7 @@ using Microsoft.SemanticKernel.Embeddings; using Microsoft.SemanticKernel.Services; -namespace 
Microsoft.SemanticKernel.Connectors.OpenAI; +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; /// /// Azure OpenAI text embedding service. @@ -20,11 +20,11 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; [Experimental("SKEXP0010")] public sealed class AzureOpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService { - private readonly AzureOpenAIClientCore _core; + private readonly AzureClientCore _client; private readonly int? _dimensions; /// - /// Creates a new client instance using API Key auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -42,15 +42,15 @@ public AzureOpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - /// Creates a new client instance supporting AAD auth. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart @@ -68,37 +68,37 @@ public AzureOpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? 
dimensions = null) { - this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - /// Creates a new client. + /// Initializes a new instance of the class. /// /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. + /// Custom for HTTP requests. /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource /// The to use for logging. If null, no logging will be performed. /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. public AzureOpenAITextEmbeddingGenerationService( string deploymentName, - OpenAIClient openAIClient, + AzureOpenAIClient azureOpenAIClient, string? modelId = null, ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); this._dimensions = dimensions; } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task>> GenerateEmbeddingsAsync( @@ -106,6 +106,6 @@ public Task>> GenerateEmbeddingsAsync( Kernel? 
kernel = null, CancellationToken cancellationToken = default) { - return this._core.GetEmbeddingsAsync(data, kernel, this._dimensions, cancellationToken); + return this._client.GetEmbeddingsAsync(this._client.DeploymentName, data, kernel, this._dimensions, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs new file mode 100644 index 000000000000..e860d4cc0e27 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToAudioService.cs @@ -0,0 +1,117 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToAudio; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Azure OpenAI text-to-audio service. +/// +[Experimental("SKEXP0010")] +public sealed class AzureOpenAITextToAudioService : ITextToAudioService +{ + /// + /// Azure OpenAI text-to-audio client. + /// + private readonly AzureClientCore _client; + + /// + /// Azure OpenAI model id. + /// + private readonly string? _modelId; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Gets the key used to store the deployment name in the dictionary. + /// + public static string DeploymentNameKey => "DeploymentName"; + + /// + /// Initializes a new instance of the class. 
+ /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextToAudioService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + var url = !string.IsNullOrWhiteSpace(httpClient?.BaseAddress?.AbsoluteUri) ? httpClient!.BaseAddress!.AbsoluteUri : endpoint; + + var options = AzureClientCore.GetAzureOpenAIClientOptions(httpClient); // https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#text-to-speech + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(url), new ApiKeyCredential(apiKey), options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextToAudioService))); + + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + + this._modelId = modelId; + } + + /// + /// Initializes a new instance of the class. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. 
If null, no logging will be performed. + public AzureOpenAITextToAudioService( + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + var url = !string.IsNullOrWhiteSpace(httpClient?.BaseAddress?.AbsoluteUri) ? httpClient!.BaseAddress!.AbsoluteUri : endpoint; + + var options = AzureClientCore.GetAzureOpenAIClientOptions(httpClient); // https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#text-to-speech + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(url), credential, options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextToAudioService))); + + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + + this._modelId = modelId; + } + + /// + public Task> GetAudioContentsAsync( + string text, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetAudioContentsAsync(this.GetModelId(executionSettings), text, executionSettings, cancellationToken); + + private string GetModelId(PromptExecutionSettings? executionSettings) + { + return + !string.IsNullOrWhiteSpace(this._modelId) ? this._modelId! : + !string.IsNullOrWhiteSpace(executionSettings?.ModelId) ? executionSettings!.ModelId! : + this._client.DeploymentName; + } +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs new file mode 100644 index 000000000000..31ad12948dcb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Services/AzureOpenAITextToImageService.cs @@ -0,0 +1,130 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Azure.AI.OpenAI; +using Azure.Core; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Azure OpenAI text to image service. +/// +[Experimental("SKEXP0010")] +public class AzureOpenAITextToImageService : ITextToImageService +{ + private readonly AzureClientCore _client; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Initializes a new instance of the class. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + /// Azure OpenAI service API version, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + public AzureOpenAITextToImageService( + string deploymentName, + string endpoint, + string apiKey, + string? modelId, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null, + string? apiVersion = null) + { + Verify.NotNullOrWhiteSpace(apiKey); + + var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; + if (connectorEndpoint is null) + { + throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. 
Please ensure at least one is provided."); + } + + var options = AzureClientCore.GetAzureOpenAIClientOptions(httpClient); // DALL-E 3 is supported in the latest API releases - https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#image-generation + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(connectorEndpoint), new ApiKeyCredential(apiKey), options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + /// Initializes a new instance of the class. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + /// Azure OpenAI service API version, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + public AzureOpenAITextToImageService( + string deploymentName, + string endpoint, + TokenCredential credential, + string? modelId, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null, + string? apiVersion = null) + { + Verify.NotNull(credential); + + var connectorEndpoint = (!string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri) + ?? throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. 
Please ensure at least one is provided."); + + var options = AzureClientCore.GetAzureOpenAIClientOptions(httpClient); // DALL-E 3 is supported in the latest API releases - https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#image-generation + + var azureOpenAIClient = new AzureOpenAIClient(new Uri(connectorEndpoint), credential, options); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + /// Initializes a new instance of the class. + /// + /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// Custom . + /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + /// The to use for logging. If null, no logging will be performed. + public AzureOpenAITextToImageService( + string deploymentName, + AzureOpenAIClient azureOpenAIClient, + string? modelId, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNull(azureOpenAIClient); + + this._client = new(deploymentName, azureOpenAIClient, loggerFactory?.CreateLogger(this.GetType())); + + if (modelId is not null) + { + this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + } + + /// + public Task> GetImageContentsAsync(TextContent input, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) + => this._client.GetImageContentsAsync(this._client.DeploymentName, input, executionSettings, kernel, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs new file mode 100644 index 000000000000..1d00ba3207f5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs @@ -0,0 +1,108 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization; +using Azure.AI.OpenAI.Chat; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI; + +/// +/// Execution settings for an AzureOpenAI completion request. +/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class AzureOpenAIPromptExecutionSettings : OpenAIPromptExecutionSettings +{ + /// + /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions. + /// This property is compatible only with Azure OpenAI. + /// + [Experimental("SKEXP0010")] + [JsonIgnore] + public AzureSearchChatDataSource? AzureChatDataSource + { + get => this._azureChatDataSource; + + set + { + this.ThrowIfFrozen(); + this._azureChatDataSource = value; + } + } + + /// + public override PromptExecutionSettings Clone() + { + var settings = base.Clone(); + settings.AzureChatDataSource = this.AzureChatDataSource; + return settings; + } + + /// + /// Create a new settings object with the values from another settings object. 
+ /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + public static new AzureOpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + { + if (executionSettings is null) + { + return new AzureOpenAIPromptExecutionSettings() + { + MaxTokens = defaultMaxTokens + }; + } + + if (executionSettings is AzureOpenAIPromptExecutionSettings settings) + { + return settings; + } + + if (executionSettings is OpenAIPromptExecutionSettings openAISettings) + { + return openAISettings.Clone(); + } + + // Having the object as the type of the value to serialize is important to ensure all properties of the settings are serialized. + // Otherwise, only the properties ServiceId and ModelId from the public API of the PromptExecutionSettings class will be serialized. + var json = JsonSerializer.Serialize(executionSettings); + + var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + + // Restore the function choice behavior that lost internal state(list of function instances) during serialization/deserialization process. + openAIExecutionSettings!.FunctionChoiceBehavior = executionSettings.FunctionChoiceBehavior; + + return openAIExecutionSettings!; + } + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// Default max tokens + /// An instance of OpenAIPromptExecutionSettings + [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] + public static AzureOpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? 
defaultMaxTokens = null) + { + var settings = FromExecutionSettings(executionSettings, defaultMaxTokens); + + if (settings.StopSequences?.Count == 0) + { + // Azure OpenAI WithData API does not allow to send empty array of stop sequences + // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0" + settings.StopSequences = null; + } + + return settings; + } + + #region private ================================================================================ + [Experimental("SKEXP0010")] + private AzureSearchChatDataSource? _azureChatDataSource; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatGenerationTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatGenerationTests.cs index 5232c40b005d..fe18febba2b4 100644 --- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatGenerationTests.cs +++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatGenerationTests.cs @@ -33,6 +33,24 @@ public GeminiChatGenerationTests() this._httpClient = new HttpClient(this._messageHandlerStub, false); } + [Fact] + public async Task ShouldReturnEmptyMessageContentAndNullMetadataIfEmptyJsonInResponseAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn.Content = new StringContent("{}"); + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var messages = await client.GenerateChatMessageAsync(chatHistory); + + // Assert + Assert.Single(messages, item => + item.Role == AuthorRole.Assistant && + string.IsNullOrEmpty(item.Content) && + item.Metadata == null); + } + [Fact] public async Task ShouldReturnEmptyMessageContentIfNoContentInResponseAsync() { @@ -417,12 +435,12 @@ public async Task ItCanUseValueTasksSequentiallyForBearerTokenAsync() using var 
httpClient = new HttpClient(multipleMessageHandlerStub, false); var client = new GeminiChatCompletionClient( - httpClient: httpClient, - modelId: "fake-model", - apiVersion: VertexAIVersion.V1, - bearerTokenProvider: () => bearerTokenGenerator.GetBearerToken(), - location: "fake-location", - projectId: "fake-project-id"); + httpClient: httpClient, + modelId: "fake-model", + apiVersion: VertexAIVersion.V1, + bearerTokenProvider: () => bearerTokenGenerator.GetBearerToken(), + location: "fake-location", + projectId: "fake-project-id"); var chatHistory = CreateSampleChatHistory(); diff --git a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatStreamingTests.cs b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatStreamingTests.cs index d47115fe4ebc..f5fb92803f5f 100644 --- a/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatStreamingTests.cs +++ b/dotnet/src/Connectors/Connectors.Google.UnitTests/Core/Gemini/Clients/GeminiChatStreamingTests.cs @@ -33,6 +33,24 @@ public GeminiChatStreamingTests() this._httpClient = new HttpClient(this._messageHandlerStub, false); } + [Fact] + public async Task ShouldReturnEmptyMessageContentAndNullMetadataIfEmptyJsonInResponseAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn.Content = new StringContent("{}"); + var client = this.CreateChatCompletionClient(); + var chatHistory = CreateSampleChatHistory(); + + // Act + var messages = await client.StreamGenerateChatMessageAsync(chatHistory).ToListAsync(); + + // Assert + Assert.Single(messages, item => + item.Role == AuthorRole.Assistant && + string.IsNullOrEmpty(item.Content) && + item.Metadata == null); + } + [Fact] public async Task ShouldReturnEmptyMessageContentIfNoContentInResponseAsync() { diff --git a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs 
b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs index 9750af44c0c7..4b948140348f 100644 --- a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs +++ b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs @@ -559,15 +559,10 @@ private List ProcessChatResponse(GeminiResponse gemini private static void ValidateGeminiResponse(GeminiResponse geminiResponse) { - if (geminiResponse.Candidates is null || geminiResponse.Candidates.Count == 0) + if (geminiResponse.PromptFeedback?.BlockReason is not null) { - if (geminiResponse.PromptFeedback?.BlockReason is not null) - { - // TODO: Currently SK doesn't support prompt feedback/finish status, so we just throw an exception. I told SK team that we need to support it: https://github.com/microsoft/semantic-kernel/issues/4621 - throw new KernelException("Prompt was blocked due to Gemini API safety reasons."); - } - - throw new KernelException("Gemini API doesn't return any data."); + // TODO: Currently SK doesn't support prompt feedback/finish status, so we just throw an exception. I told SK team that we need to support it: https://github.com/microsoft/semantic-kernel/issues/4621 + throw new KernelException("Prompt was blocked due to Gemini API safety reasons."); } } @@ -596,7 +591,9 @@ private void LogUsage(List chatMessageContents) } private List GetChatMessageContentsFromResponse(GeminiResponse geminiResponse) - => geminiResponse.Candidates!.Select(candidate => this.GetChatMessageContentFromCandidate(geminiResponse, candidate)).ToList(); + => geminiResponse.Candidates == null ? 
+ [new GeminiChatMessageContent(role: AuthorRole.Assistant, content: string.Empty, modelId: this._modelId)] + : geminiResponse.Candidates.Select(candidate => this.GetChatMessageContentFromCandidate(geminiResponse, candidate)).ToList(); private GeminiChatMessageContent GetChatMessageContentFromCandidate(GeminiResponse geminiResponse, GeminiResponseCandidate candidate) { @@ -630,7 +627,7 @@ private GeminiStreamingChatMessageContent GetStreamingChatContentFromChatContent modelId: this._modelId, calledToolResult: message.CalledToolResult, metadata: message.Metadata, - choiceIndex: message.Metadata!.Index); + choiceIndex: message.Metadata?.Index ?? 0); } if (message.ToolCalls is not null) @@ -641,14 +638,14 @@ private GeminiStreamingChatMessageContent GetStreamingChatContentFromChatContent modelId: this._modelId, toolCalls: message.ToolCalls, metadata: message.Metadata, - choiceIndex: message.Metadata!.Index); + choiceIndex: message.Metadata?.Index ?? 0); } return new GeminiStreamingChatMessageContent( role: message.Role, content: message.Content, modelId: this._modelId, - choiceIndex: message.Metadata!.Index, + choiceIndex: message.Metadata?.Index ?? 0, metadata: message.Metadata); } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs new file mode 100644 index 000000000000..33d995cf87e0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchGenericDataModelMapper.cs @@ -0,0 +1,189 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure AI Search. +/// +internal class AzureAISearchGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +{ + /// A that defines the schema of the data in the database. + private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + + /// + /// Initializes a new instance of the class. + /// + /// A that defines the schema of the data in the database. + public AzureAISearchGenericDataModelMapper(VectorStoreRecordDefinition vectorStoreRecordDefinition) + { + Verify.NotNull(vectorStoreRecordDefinition); + + this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; + } + + /// + public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + Verify.NotNull(dataModel); + + var storageJsonObject = new JsonObject(); + + // Loop through all known properties and map each from the data model json to the storage json. + foreach (var property in this._vectorStoreRecordDefinition.Properties) + { + if (property is VectorStoreRecordKeyProperty keyProperty) + { + var storagePropertyName = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; + storageJsonObject.Add(storagePropertyName, dataModel.Key); + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var dataValue)) + { + var storagePropertyName = dataProperty.StoragePropertyName ?? 
dataProperty.DataModelPropertyName; + var serializedJsonNode = JsonSerializer.SerializeToNode(dataValue); + storageJsonObject.Add(storagePropertyName, serializedJsonNode); + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.DataModelPropertyName, out var vectorValue)) + { + var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + var serializedJsonNode = JsonSerializer.SerializeToNode(vectorValue); + storageJsonObject.Add(storagePropertyName, serializedJsonNode); + } + } + } + + return storageJsonObject; + } + + /// + public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + { + Verify.NotNull(storageModel); + + // Create variables to store the response properties. + var dataProperties = new Dictionary(); + var vectorProperties = new Dictionary(); + string? key = null; + + // Loop through all known properties and map each from json to the data type. + foreach (var property in this._vectorStoreRecordDefinition.Properties) + { + if (property is VectorStoreRecordKeyProperty keyProperty) + { + var storagePropertyName = keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName; + var value = storageModel[storagePropertyName]; + if (value is null) + { + throw new VectorStoreRecordMappingException($"The key property '{storagePropertyName}' is missing from the record retrieved from storage."); + } + + key = (string)value!; + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + var storagePropertyName = dataProperty.StoragePropertyName ?? 
dataProperty.DataModelPropertyName; + if (!storageModel.TryGetPropertyValue(storagePropertyName, out var value)) + { + continue; + } + + if (value is not null) + { + dataProperties.Add(dataProperty.DataModelPropertyName, GetDataPropertyValue(property.PropertyType, value)); + } + else + { + dataProperties.Add(dataProperty.DataModelPropertyName, null); + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) + { + var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + if (!storageModel.TryGetPropertyValue(storagePropertyName, out var value)) + { + continue; + } + + if (value is not null) + { + ReadOnlyMemory vector = value.AsArray().Select(x => (float)x!).ToArray(); + vectorProperties.Add(vectorProperty.DataModelPropertyName, vector); + } + else + { + vectorProperties.Add(vectorProperty.DataModelPropertyName, null); + } + } + } + + if (key is null) + { + throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + } + + return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + } + + /// + /// Get the value of the given json node as the given property type. + /// + /// The type of property that is required. + /// The json node containing the property value. + /// The value of the json node as the required type. + private static object? 
GetDataPropertyValue(Type propertyType, JsonNode value) + { + if (propertyType == typeof(string)) + { + return (string?)value.AsValue(); + } + + if (propertyType == typeof(int) || propertyType == typeof(int?)) + { + return (int?)value.AsValue(); + } + + if (propertyType == typeof(long) || propertyType == typeof(long?)) + { + return (long?)value.AsValue(); + } + + if (propertyType == typeof(float) || propertyType == typeof(float?)) + { + return (float?)value.AsValue(); + } + + if (propertyType == typeof(double) || propertyType == typeof(double?)) + { + return (double?)value.AsValue(); + } + + if (propertyType == typeof(bool) || propertyType == typeof(bool?)) + { + return (bool?)value.AsValue(); + } + + if (propertyType == typeof(DateTimeOffset) || propertyType == typeof(DateTimeOffset?)) + { + return value.GetValue(); + } + + if (typeof(IEnumerable).IsAssignableFrom(propertyType)) + { + return value.Deserialize(propertyType); + } + + return null; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs index fc1061171671..f532bad39b6f 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchMemoryRecord.cs @@ -4,7 +4,6 @@ using System.Text; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; @@ -60,7 +59,6 @@ internal sealed class AzureAISearchMemoryRecord /// Content embedding /// [JsonPropertyName(EmbeddingField)] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Embedding { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs 
b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs index 7e2de2e8e83e..ba518ddf6724 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchServiceCollectionExtensions.cs @@ -3,10 +3,13 @@ using System; using Azure; using Azure.Core; +using Azure.Core.Serialization; +using Azure.Search.Documents; using Azure.Search.Documents.Indexes; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel.Connectors.AzureAISearch; using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; namespace Microsoft.SemanticKernel; @@ -59,9 +62,19 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec serviceId, (sp, obj) => { - var searchIndexClient = new SearchIndexClient(endpoint, tokenCredential); var selectedOptions = options ?? sp.GetService(); + // Build options for the Azure AI Search client and construct it. + var searchClientOptions = new SearchClientOptions(); + searchClientOptions.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; + if (selectedOptions?.JsonSerializerOptions != null) + { + searchClientOptions.Serializer = new JsonObjectSerializer(selectedOptions.JsonSerializerOptions); + } + + var searchIndexClient = new SearchIndexClient(endpoint, tokenCredential, searchClientOptions); + + // Construct the vector store. return new AzureAISearchVectorStore( searchIndexClient, selectedOptions); @@ -88,9 +101,19 @@ public static IServiceCollection AddAzureAISearchVectorStore(this IServiceCollec serviceId, (sp, obj) => { - var searchIndexClient = new SearchIndexClient(endpoint, credential); var selectedOptions = options ?? sp.GetService(); + // Build options for the Azure AI Search client and construct it. 
+ var searchClientOptions = new SearchClientOptions(); + searchClientOptions.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; + if (selectedOptions?.JsonSerializerOptions != null) + { + searchClientOptions.Serializer = new JsonObjectSerializer(selectedOptions.JsonSerializerOptions); + } + + var searchIndexClient = new SearchIndexClient(endpoint, credential, searchClientOptions); + + // Construct the vector store. return new AzureAISearchVectorStore( searchIndexClient, selectedOptions); diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs index 2ca2bf9577f5..5a6b7e73b229 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStore.cs @@ -56,8 +56,16 @@ public IVectorStoreRecordCollection GetCollection( return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._searchIndexClient, name, vectorStoreRecordDefinition); } - var directlyCreatedStore = new AzureAISearchVectorStoreRecordCollection(this._searchIndexClient, name, new AzureAISearchVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; - return directlyCreatedStore!; + var recordCollection = new AzureAISearchVectorStoreRecordCollection( + this._searchIndexClient, + name, + new AzureAISearchVectorStoreRecordCollectionOptions() + { + JsonSerializerOptions = this._options.JsonSerializerOptions, + VectorStoreRecordDefinition = vectorStoreRecordDefinition + }) as IVectorStoreRecordCollection; + + return recordCollection!; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs index e8d54c8b7740..06e099efc4fa 
100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreOptions.cs @@ -1,5 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Text.Json; +using Azure.Search.Documents.Indexes; + namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; /// @@ -8,7 +11,14 @@ namespace Microsoft.SemanticKernel.Connectors.AzureAISearch; public sealed class AzureAISearchVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if custom options are required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// public IAzureAISearchVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } + + /// + /// Gets or sets the JSON serializer options to use when converting between the data model and the Azure AI Search record. + /// Note that when using the default mapper and you are constructing your own , you will need + /// to provide the same set of both here and when constructing the . + /// + public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs index 21018b39c223..5a2873f224b4 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollection.cs @@ -77,17 +77,11 @@ public sealed class AzureAISearchVectorStoreRecordCollection : IVectorS /// Optional configuration options for this class. private readonly AzureAISearchVectorStoreRecordCollectionOptions _options; - /// A definition of the current storage model. 
- private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + /// A mapper to use for converting between the data model and the Azure AI Search record. + private readonly IVectorStoreRecordMapper? _mapper; - /// The storage name of the key field for the collections that this class is used with. - private readonly string _keyStoragePropertyName; - - /// The storage names of all non vector fields on the current model. - private readonly List _nonVectorStoragePropertyNames = new(); - - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. - private readonly Dictionary _storagePropertyNames = new(); + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// /// Initializes a new instance of the class. @@ -102,29 +96,42 @@ public AzureAISearchVectorStoreRecordCollection(SearchIndexClient searchIndexCli // Verify. Verify.NotNull(searchIndexClient); Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonObjectCustomMapper is not null, s_supportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._searchIndexClient = searchIndexClient; this._collectionName = collectionName; this._options = options ?? new AzureAISearchVectorStoreRecordCollectionOptions(); this._searchClient = this._searchIndexClient.GetSearchClient(collectionName); - this._vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); - var jsonSerializerOptions = this._options.JsonSerializerOptions ?? 
JsonSerializerOptions.Default; + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + JsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default + }); // Validate property types. - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify(typeof(TRecord).Name, this._vectorStoreRecordDefinition, supportsMultipleVectors: true, requiresAtLeastOneVector: false); - VectorStoreRecordPropertyReader.VerifyPropertyTypes([properties.KeyProperty], s_supportedKeyTypes, "Key"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.DataProperties, s_supportedDataTypes, "Data", supportEnumerable: true); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.VectorProperties, s_supportedVectorTypes, "Vector"); - - // Get storage names and store for later use. - this._storagePropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(properties, typeof(TRecord), jsonSerializerOptions); - this._keyStoragePropertyName = this._storagePropertyNames[properties.KeyProperty.DataModelPropertyName]; - this._nonVectorStoragePropertyNames = properties.DataProperties - .Cast() - .Concat([properties.KeyProperty]) - .Select(x => this._storagePropertyNames[x.DataModelPropertyName]) - .ToList(); + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + + // Resolve mapper. + // First, if someone has provided a custom mapper, use that. + // If they didn't provide a custom mapper, and the record type is the generic data model, use the built in mapper for that. 
+ // Otherwise, don't set the mapper, and we'll default to just using Azure AI Search's built in json serialization and deserialization. + if (this._options.JsonObjectCustomMapper is not null) + { + this._mapper = this._options.JsonObjectCustomMapper; + } + else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + this._mapper = new AzureAISearchGenericDataModelMapper(this._propertyReader.RecordDefinition) as IVectorStoreRecordMapper; + } } /// @@ -160,18 +167,22 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) var searchFields = new List(); // Loop through all properties and create the search fields. - foreach (var property in this._vectorStoreRecordDefinition.Properties) + foreach (var property in this._propertyReader.Properties) { // Key property. if (property is VectorStoreRecordKeyProperty keyProperty) { - searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField(keyProperty, this._keyStoragePropertyName)); + searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapKeyField( + keyProperty, + this._propertyReader.KeyPropertyJsonName)); } // Data property. if (property is VectorStoreRecordDataProperty dataProperty) { - searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapDataField(dataProperty, this._storagePropertyNames[dataProperty.DataModelPropertyName])); + searchFields.Add(AzureAISearchVectorStoreCollectionCreateMapping.MapDataField( + dataProperty, + this._propertyReader.GetJsonPropertyName(dataProperty.DataModelPropertyName))); } // Vector property. 
@@ -179,7 +190,7 @@ public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { (VectorSearchField vectorSearchField, VectorSearchAlgorithmConfiguration algorithmConfiguration, VectorSearchProfile vectorSearchProfile) = AzureAISearchVectorStoreCollectionCreateMapping.MapVectorField( vectorProperty, - this._storagePropertyNames[vectorProperty.DataModelPropertyName]); + this._propertyReader.GetJsonPropertyName(vectorProperty.DataModelPropertyName)); // Add the search field, plus its profile and algorithm configuration to the search config. searchFields.Add(vectorSearchField); @@ -256,7 +267,7 @@ public Task DeleteAsync(string key, DeleteRecordOptions? options = default, Canc // Remove record. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._keyStoragePropertyName, [key], new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._propertyReader.KeyPropertyJsonName, [key], new IndexDocumentsOptions(), cancellationToken)); } /// @@ -267,7 +278,7 @@ public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? opti // Remove records. return this.RunOperationAsync( "DeleteDocuments", - () => this._searchClient.DeleteDocumentsAsync(this._keyStoragePropertyName, keys, new IndexDocumentsOptions(), cancellationToken)); + () => this._searchClient.DeleteDocumentsAsync(this._propertyReader.KeyPropertyJsonName, keys, new IndexDocumentsOptions(), cancellationToken)); } /// @@ -316,7 +327,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco const string OperationName = "GetDocument"; // Use the user provided mapper. 
- if (this._options.JsonObjectCustomMapper is not null) + if (this._mapper is not null) { var jsonObject = await this.RunOperationAsync( OperationName, @@ -331,7 +342,7 @@ public async IAsyncEnumerable UpsertBatchAsync(IEnumerable reco DatabaseName, this._collectionName, OperationName, - () => this._options.JsonObjectCustomMapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); + () => this._mapper!.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); } // Use the built in Azure AI Search mapper. @@ -355,13 +366,13 @@ private Task> MapToStorageModelAndUploadDocumentA const string OperationName = "UploadDocuments"; // Use the user provided mapper. - if (this._options.JsonObjectCustomMapper is not null) + if (this._mapper is not null) { var jsonObjects = VectorStoreErrorHandler.RunModelConversion( DatabaseName, this._collectionName, OperationName, - () => records.Select(this._options.JsonObjectCustomMapper!.MapFromDataToStorageModel)); + () => records.Select(this._mapper!.MapFromDataToStorageModel)); return this.RunOperationAsync( OperationName, @@ -384,7 +395,8 @@ private GetDocumentOptions ConvertGetDocumentOptions(GetRecordOptions? 
options) var innerOptions = new GetDocumentOptions(); if (options?.IncludeVectors is false) { - innerOptions.SelectedFields.AddRange(this._nonVectorStoragePropertyNames); + innerOptions.SelectedFields.AddRange(this._propertyReader.KeyPropertyJsonNames); + innerOptions.SelectedFields.AddRange(this._propertyReader.DataPropertyJsonNames); } return innerOptions; diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs index 462dcd5d6e66..62e524a1c7b1 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureAISearch/AzureAISearchVectorStoreRecordCollectionOptions.cs @@ -33,7 +33,8 @@ public sealed class AzureAISearchVectorStoreRecordCollectionOptions /// /// Gets or sets the JSON serializer options to use when converting between the data model and the Azure AI Search record. - /// Note that when using the default mapper, you will need to provide the same set of both here and when constructing the . + /// Note that when using the default mapper and you are constructing your own , you will need + /// to provide the same set of both here and when constructing the . /// public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; } diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs new file mode 100644 index 000000000000..197faf81f093 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBConstants.cs @@ -0,0 +1,53 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Constants for Azure CosmosDB MongoDB vector store implementation. +/// +internal static class AzureCosmosDBMongoDBConstants +{ + /// Reserved key property name in Azure CosmosDB MongoDB. + internal const string MongoReservedKeyPropertyName = "_id"; + + /// Reserved key property name in data model. + internal const string DataModelReservedKeyPropertyName = "Id"; + + /// A containing the supported key types. + internal static readonly HashSet SupportedKeyTypes = + [ + typeof(string) + ]; + + /// A containing the supported data property types. + internal static readonly HashSet SupportedDataTypes = + [ + typeof(bool), + typeof(bool?), + typeof(string), + typeof(int), + typeof(int?), + typeof(long), + typeof(long?), + typeof(float), + typeof(float?), + typeof(double), + typeof(double?), + typeof(decimal), + typeof(decimal?), + typeof(DateTime), + typeof(DateTime?), + ]; + + /// A containing the supported vector types. + internal static readonly HashSet SupportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBGenericDataModelMapper.cs new file mode 100644 index 000000000000..95c0cfbff137 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBGenericDataModelMapper.cs @@ -0,0 +1,181 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB MongoDB. +/// +internal sealed class AzureCosmosDBMongoDBGenericDataModelMapper : IVectorStoreRecordMapper, BsonDocument> +{ + /// A that defines the schema of the data in the database. + private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + + /// + /// Initializes a new instance of the class. + /// + /// A that defines the schema of the data in the database. + public AzureCosmosDBMongoDBGenericDataModelMapper(VectorStoreRecordDefinition vectorStoreRecordDefinition) + { + Verify.NotNull(vectorStoreRecordDefinition); + + this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; + } + + /// + public BsonDocument MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + Verify.NotNull(dataModel); + + var document = new BsonDocument(); + + // Loop through all known properties and map each from the data model to the storage model. + foreach (var property in this._vectorStoreRecordDefinition.Properties) + { + var storagePropertyName = property.StoragePropertyName ?? 
property.DataModelPropertyName; + + if (property is VectorStoreRecordKeyProperty keyProperty) + { + document[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName] = dataModel.Key; + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var dataValue)) + { + document[storagePropertyName] = BsonValue.Create(dataValue); + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.DataModelPropertyName, out var vectorValue)) + { + document[storagePropertyName] = BsonArray.Create(GetVectorArray(vectorValue)); + } + } + } + + return document; + } + + /// + public VectorStoreGenericDataModel MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options) + { + Verify.NotNull(storageModel); + + // Create variables to store the response properties. + string? key = null; + var dataProperties = new Dictionary(); + var vectorProperties = new Dictionary(); + + // Loop through all known properties and map each from the storage model to the data model. + foreach (var property in this._vectorStoreRecordDefinition.Properties) + { + var storagePropertyName = property.StoragePropertyName ?? 
property.DataModelPropertyName; + + if (property is VectorStoreRecordKeyProperty keyProperty) + { + if (storageModel.TryGetValue(AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName, out var keyValue)) + { + key = keyValue.AsString; + } + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + if (!storageModel.TryGetValue(storagePropertyName, out var dataValue)) + { + continue; + } + + dataProperties.Add(dataProperty.DataModelPropertyName, GetDataPropertyValue(property.DataModelPropertyName, property.PropertyType, dataValue)); + } + else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) + { + if (!storageModel.TryGetValue(storagePropertyName, out var vectorValue)) + { + continue; + } + + vectorProperties.Add(vectorProperty.DataModelPropertyName, GetVectorPropertyValue(property.DataModelPropertyName, property.PropertyType, vectorValue)); + } + } + + if (key is null) + { + throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + } + + return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + } + + #region private + + private static object? GetDataPropertyValue(string propertyName, Type propertyType, BsonValue value) + { + if (value.IsBsonNull) + { + return null; + } + + return propertyType switch + { + Type t when t == typeof(bool) => value.AsBoolean, + Type t when t == typeof(bool?) => value.AsNullableBoolean, + Type t when t == typeof(string) => value.AsString, + Type t when t == typeof(int) => value.AsInt32, + Type t when t == typeof(int?) => value.AsNullableInt32, + Type t when t == typeof(long) => value.AsInt64, + Type t when t == typeof(long?) => value.AsNullableInt64, + Type t when t == typeof(float) => ((float)value.AsDouble), + Type t when t == typeof(float?) => ((float?)value.AsNullableDouble), + Type t when t == typeof(double) => value.AsDouble, + Type t when t == typeof(double?) 
=> value.AsNullableDouble, + Type t when t == typeof(decimal) => value.AsDecimal, + Type t when t == typeof(decimal?) => value.AsNullableDecimal, + Type t when t == typeof(DateTime) => value.ToUniversalTime(), + Type t when t == typeof(DateTime?) => value.ToNullableUniversalTime(), + Type t when typeof(IEnumerable).IsAssignableFrom(t) => value.AsBsonArray.Select( + item => GetDataPropertyValue(propertyName, VectorStoreRecordPropertyVerification.GetCollectionElementType(t), item)), + _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in generic data model.") + }; + } + + private static object? GetVectorPropertyValue(string propertyName, Type propertyType, BsonValue value) + { + if (value.IsBsonNull) + { + return null; + } + + return propertyType switch + { + Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) => + new ReadOnlyMemory(value.AsBsonArray.Select(item => (float)item.AsDouble).ToArray()), + Type t when t == typeof(ReadOnlyMemory) || t == typeof(ReadOnlyMemory?) => + new ReadOnlyMemory(value.AsBsonArray.Select(item => item.AsDouble).ToArray()), + _ => throw new NotSupportedException($"Mapping for property {propertyName} with type {propertyType.FullName} is not supported in generic data model.") + }; + } + + private static object GetVectorArray(object? 
vector) + { + if (vector is null) + { + return Array.Empty(); + } + + return vector switch + { + ReadOnlyMemory memoryFloat => memoryFloat.ToArray(), + ReadOnlyMemory memoryDouble => memoryDouble.ToArray(), + _ => throw new NotSupportedException($"Mapping for type {vector.GetType().FullName} is not supported in generic data model.") + }; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs new file mode 100644 index 000000000000..807bb030dcfc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBKernelBuilderExtensions.cs @@ -0,0 +1,51 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Azure CosmosDB MongoDB instances on the . +/// +public static class AzureCosmosDBMongoDBKernelBuilderExtensions +{ + /// + /// Register a Azure CosmosDB MongoDB with the specified service ID + /// and where the Azure CosmosDB MongoDB is retrieved from the dependency injection container. + /// + /// The builder to register the on. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// The kernel builder. + public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStore( + this IKernelBuilder builder, + AzureCosmosDBMongoDBVectorStoreOptions? options = default, + string? serviceId = default) + { + builder.Services.AddAzureCosmosDBMongoDBVectorStore(options, serviceId); + return builder; + } + + /// + /// Register a Azure CosmosDB MongoDB with the specified service ID + /// and where the Azure CosmosDB MongoDB is constructed using the provided and . 
+ /// + /// The builder to register the on. + /// Connection string required to connect to Azure CosmosDB MongoDB. + /// Database name for Azure CosmosDB MongoDB. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// The kernel builder. + public static IKernelBuilder AddAzureCosmosDBMongoDBVectorStore( + this IKernelBuilder builder, + string connectionString, + string databaseName, + AzureCosmosDBMongoDBVectorStoreOptions? options = default, + string? serviceId = default) + { + builder.Services.AddAzureCosmosDBMongoDBVectorStore(connectionString, databaseName, options, serviceId); + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs new file mode 100644 index 000000000000..c335a2c0ce60 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBServiceCollectionExtensions.cs @@ -0,0 +1,80 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Azure CosmosDB MongoDB instances on an . +/// +public static class AzureCosmosDBMongoDBServiceCollectionExtensions +{ + /// + /// Register a Azure CosmosDB MongoDB with the specified service ID + /// and where the Azure CosmosDB MongoDB is retrieved from the dependency injection container. + /// + /// The to register the on. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// Service collection. 
+ public static IServiceCollection AddAzureCosmosDBMongoDBVectorStore( + this IServiceCollection services, + AzureCosmosDBMongoDBVectorStoreOptions? options = default, + string? serviceId = default) + { + // If we are not constructing MongoDatabase, add the IVectorStore as transient, since we + // cannot make assumptions about how MongoDatabase is being managed. + services.AddKeyedTransient( + serviceId, + (sp, obj) => + { + var database = sp.GetRequiredService(); + var selectedOptions = options ?? sp.GetService(); + + return new AzureCosmosDBMongoDBVectorStore(database, options); + }); + + return services; + } + + /// + /// Register a Azure CosmosDB MongoDB with the specified service ID + /// and where the Azure CosmosDB MongoDB is constructed using the provided and . + /// + /// The to register the on. + /// Connection string required to connect to Azure CosmosDB MongoDB. + /// Database name for Azure CosmosDB MongoDB. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// Service collection. + public static IServiceCollection AddAzureCosmosDBMongoDBVectorStore( + this IServiceCollection services, + string connectionString, + string databaseName, + AzureCosmosDBMongoDBVectorStoreOptions? options = default, + string? serviceId = default) + { + // If we are constructing IMongoDatabase, add the IVectorStore as singleton, since we are managing the lifetime of it, + // and the recommendation from Mongo is to register it with a singleton lifetime. + services.AddKeyedSingleton( + serviceId, + (sp, obj) => + { + var settings = MongoClientSettings.FromConnectionString(connectionString); + settings.ApplicationName = HttpHeaderConstant.Values.UserAgent; + + var mongoClient = new MongoClient(settings); + var database = mongoClient.GetDatabase(databaseName); + + var selectedOptions = options ?? 
sp.GetService(); + + return new AzureCosmosDBMongoDBVectorStore(database, options); + }); + + return services; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs new file mode 100644 index 000000000000..7f907d068983 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStore.cs @@ -0,0 +1,77 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using Microsoft.SemanticKernel.Data; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Class for accessing the list of collections in a Azure CosmosDB MongoDB vector store. +/// +/// +/// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. +/// +public sealed class AzureCosmosDBMongoDBVectorStore : IVectorStore +{ + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + private readonly IMongoDatabase _mongoDatabase; + + /// Optional configuration options for this class. + private readonly AzureCosmosDBMongoDBVectorStoreOptions _options; + + /// + /// Initializes a new instance of the class. + /// + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + /// Optional configuration options for this class. + public AzureCosmosDBMongoDBVectorStore(IMongoDatabase mongoDatabase, AzureCosmosDBMongoDBVectorStoreOptions? options = default) + { + Verify.NotNull(mongoDatabase); + + this._mongoDatabase = mongoDatabase; + this._options = options ?? new(); + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) + where TKey : notnull + where TRecord : class + { + if (typeof(TKey) != typeof(string)) + { + throw new NotSupportedException("Only string keys are supported."); + } + + if (this._options.VectorStoreCollectionFactory is not null) + { + return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._mongoDatabase, name, vectorStoreRecordDefinition); + } + + var recordCollection = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + this._mongoDatabase, + name, + new() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + + return recordCollection!; + } + + /// + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var cursor = await this._mongoDatabase + .ListCollectionNamesAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var name in cursor.Current) + { + yield return name; + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs new file mode 100644 index 000000000000..08df3aef81d8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreOptions.cs @@ -0,0 +1,14 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Options when creating a +/// +public sealed class AzureCosmosDBMongoDBVectorStoreOptions +{ + /// + /// An optional factory to use for constructing instances, if a custom record collection is required. + /// + public IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..0e1a8cbc3fc9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollection.cs @@ -0,0 +1,447 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Service for storing and retrieving vector records, that uses Azure CosmosDB MongoDB as the underlying storage. +/// +/// The data model to use for adding, updating and retrieving data from storage. +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollection : IVectorStoreRecordCollection where TRecord : class +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +{ + /// The name of this database for telemetry purposes. + private const string DatabaseName = "AzureCosmosDBMongoDB"; + + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + private readonly IMongoDatabase _mongoDatabase; + + /// Azure CosmosDB MongoDB collection to perform record operations. + private readonly IMongoCollection _mongoCollection; + + /// Optional configuration options for this class. 
+ private readonly AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions _options; + + /// Interface for mapping between a storage model, and the consumer record data model. + private readonly IVectorStoreRecordMapper _mapper; + + /// A dictionary that maps from a property name to the storage name that should be used when serializing it for data and vector properties. + private readonly Dictionary _storagePropertyNames; + + /// Collection of vector storage property names. + private readonly List _vectorStoragePropertyNames; + + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; + + /// + public string CollectionName { get; } + + /// + /// Initializes a new instance of the class. + /// + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + /// The name of the collection that this will access. + /// Optional configuration options for this class. + public AzureCosmosDBMongoDBVectorStoreRecordCollection( + IMongoDatabase mongoDatabase, + string collectionName, + AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions? options = default) + { + // Verify. + Verify.NotNull(mongoDatabase); + Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.BsonDocumentCustomMapper is not null, AzureCosmosDBMongoDBConstants.SupportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); + + // Assign. + this._mongoDatabase = mongoDatabase; + this._mongoCollection = mongoDatabase.GetCollection(collectionName); + this.CollectionName = collectionName; + this._options = options ?? 
new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions(); + this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); + + this._storagePropertyNames = GetStoragePropertyNames(this._propertyReader.Properties, typeof(TRecord)); + + // Use Mongo reserved key property name as storage key property name + this._storagePropertyNames[this._propertyReader.KeyPropertyName] = AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName; + + this._vectorStoragePropertyNames = this._propertyReader.VectorProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); + + this._mapper = this.InitializeMapper(); + } + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + => this.RunOperationAsync("ListCollectionNames", () => this.InternalCollectionExistsAsync(cancellationToken)); + + /// + public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + await this.RunOperationAsync("CreateCollection", + () => this._mongoDatabase.CreateCollectionAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); + + await this.RunOperationAsync("CreateIndex", + () => this.CreateIndexAsync(this.CollectionName, cancellationToken: cancellationToken)).ConfigureAwait(false); + } + + /// + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) + { + await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + } + } + + /// + public async Task DeleteAsync(string key, DeleteRecordOptions? 
options = null, CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(key); + + await this.RunOperationAsync("DeleteOne", () => this._mongoCollection.DeleteOneAsync(this.GetFilterById(key), cancellationToken)) + .ConfigureAwait(false); + } + + /// + public async Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(keys); + + await this.RunOperationAsync("DeleteMany", () => this._mongoCollection.DeleteManyAsync(this.GetFilterByIds(keys), cancellationToken)) + .ConfigureAwait(false); + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + => this.RunOperationAsync("DropCollection", () => this._mongoDatabase.DropCollectionAsync(this.CollectionName, cancellationToken)); + + /// + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNullOrWhiteSpace(key); + + const string OperationName = "Find"; + + var includeVectors = options?.IncludeVectors ?? false; + + var record = await this.RunOperationAsync(OperationName, async () => + { + using var cursor = await this + .FindAsync(this.GetFilterById(key), options, cancellationToken) + .ConfigureAwait(false); + + return await cursor.SingleOrDefaultAsync(cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + + if (record is null) + { + return null; + } + + return VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromStorageToDataModel(record, new() { IncludeVectors = includeVectors })); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + IEnumerable keys, + GetRecordOptions? 
options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(keys); + + const string OperationName = "Find"; + + using var cursor = await this + .FindAsync(this.GetFilterByIds(keys), options, cancellationToken) + .ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var record in cursor.Current) + { + if (record is not null) + { + yield return VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromStorageToDataModel(record, new())); + } + } + } + } + + /// + public Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + { + Verify.NotNull(record); + + const string OperationName = "ReplaceOne"; + + var replaceOptions = new ReplaceOptions { IsUpsert = true }; + var storageModel = VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record)); + + var key = storageModel[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName].AsString; + + return this.RunOperationAsync(OperationName, async () => + { + await this._mongoCollection + .ReplaceOneAsync(this.GetFilterById(key), storageModel, replaceOptions, cancellationToken) + .ConfigureAwait(false); + + return key; + }); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync( + IEnumerable records, + UpsertRecordOptions? 
options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(records); + + var tasks = records.Select(record => this.UpsertAsync(record, options, cancellationToken)); + var results = await Task.WhenAll(tasks).ConfigureAwait(false); + + foreach (var result in results) + { + if (result is not null) + { + yield return result; + } + } + } + + #region private + + private async Task CreateIndexAsync(string collectionName, CancellationToken cancellationToken) + { + var indexCursor = await this._mongoCollection.Indexes.ListAsync(cancellationToken).ConfigureAwait(false); + var indexes = indexCursor.ToList(cancellationToken).Select(index => index["name"].ToString()) ?? []; + var uniqueIndexes = new HashSet(indexes); + + var indexArray = new BsonArray(); + + // Create separate index for each vector property + foreach (var property in this._propertyReader.VectorProperties) + { + // Use index name same as vector property name with underscore + var vectorPropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + var indexName = $"{vectorPropertyName}_"; + + // If index already exists, proceed to the next vector property + if (uniqueIndexes.Contains(indexName)) + { + continue; + } + + // Otherwise, create a new index + var searchOptions = new BsonDocument + { + { "kind", GetIndexKind(property.IndexKind, vectorPropertyName) }, + { "numLists", this._options.NumLists }, + { "similarity", GetDistanceFunction(property.DistanceFunction, vectorPropertyName) }, + { "dimensions", property.Dimensions } + }; + + if (this._options.EfConstruction is not null) + { + searchOptions["efConstruction"] = this._options.EfConstruction; + } + + var indexDocument = new BsonDocument + { + ["name"] = indexName, + ["key"] = new BsonDocument { [vectorPropertyName] = "cosmosSearch" }, + ["cosmosSearchOptions"] = searchOptions + }; + + indexArray.Add(indexDocument); + } + + if (indexArray.Count > 0) + { + var createIndexCommand = new 
BsonDocument + { + { "createIndexes", collectionName }, + { "indexes", indexArray } + }; + + await this._mongoDatabase.RunCommandAsync(createIndexCommand, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task> FindAsync(FilterDefinition filter, GetRecordOptions? options, CancellationToken cancellationToken) + { + ProjectionDefinitionBuilder projectionBuilder = Builders.Projection; + ProjectionDefinition? projectionDefinition = null; + + var includeVectors = options?.IncludeVectors ?? false; + + if (!includeVectors && this._vectorStoragePropertyNames.Count > 0) + { + foreach (var vectorPropertyName in this._vectorStoragePropertyNames) + { + projectionDefinition = projectionDefinition is not null ? + projectionDefinition.Exclude(vectorPropertyName) : + projectionBuilder.Exclude(vectorPropertyName); + } + } + + var findOptions = projectionDefinition is not null ? + new FindOptions { Projection = projectionDefinition } : + null; + + return await this._mongoCollection.FindAsync(filter, findOptions, cancellationToken).ConfigureAwait(false); + } + + private FilterDefinition GetFilterById(string id) + => Builders.Filter.Eq(document => document[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName], id); + + private FilterDefinition GetFilterByIds(IEnumerable ids) + => Builders.Filter.In(document => document[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName].AsString, ids); + + private async Task InternalCollectionExistsAsync(CancellationToken cancellationToken) + { + var filter = new BsonDocument("name", this.CollectionName); + var options = new ListCollectionNamesOptions { Filter = filter }; + + using var cursor = await this._mongoDatabase.ListCollectionNamesAsync(options, cancellationToken: cancellationToken).ConfigureAwait(false); + + return await cursor.AnyAsync(cancellationToken).ConfigureAwait(false); + } + + private async Task RunOperationAsync(string operationName, Func operation) + { + try + { + await 
operation.Invoke().ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException("Call to vector store failed.", ex) + { + VectorStoreType = DatabaseName, + CollectionName = this.CollectionName, + OperationName = operationName + }; + } + } + + private async Task RunOperationAsync(string operationName, Func> operation) + { + try + { + return await operation.Invoke().ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException("Call to vector store failed.", ex) + { + VectorStoreType = DatabaseName, + CollectionName = this.CollectionName, + OperationName = operationName + }; + } + } + + /// + /// More information about Azure CosmosDB for MongoDB index kinds here: . + /// + private static string GetIndexKind(string? indexKind, string vectorPropertyName) + { + return indexKind switch + { + IndexKind.Hnsw => "vector-hnsw", + IndexKind.IvfFlat => "vector-ivf", + _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB for MongoDB VectorStore.") + }; + } + + /// + /// More information about Azure CosmosDB for MongoDB distance functions here: . + /// + private static string GetDistanceFunction(string? distanceFunction, string vectorPropertyName) + { + return distanceFunction switch + { + DistanceFunction.CosineDistance => "COS", + DistanceFunction.DotProductSimilarity => "IP", + DistanceFunction.EuclideanDistance => "L2", + _ => throw new InvalidOperationException($"Distance function '{distanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB for MongoDB VectorStore.") + }; + } + + /// + /// Gets storage property names taking into account BSON serialization attributes. 
+ /// + private static Dictionary GetStoragePropertyNames( + IReadOnlyList properties, + Type dataModel) + { + var storagePropertyNames = new Dictionary(); + + foreach (var property in properties) + { + var propertyInfo = dataModel.GetProperty(property.DataModelPropertyName); + string propertyName; + + if (propertyInfo != null) + { + var bsonElementAttribute = propertyInfo.GetCustomAttribute(); + + propertyName = bsonElementAttribute?.ElementName ?? property.DataModelPropertyName; + } + else + { + propertyName = property.DataModelPropertyName; + } + + storagePropertyNames[property.DataModelPropertyName] = propertyName; + } + + return storagePropertyNames; + } + + /// + /// Returns custom mapper, generic data model mapper or default record mapper. + /// + private IVectorStoreRecordMapper InitializeMapper() + { + if (this._options.BsonDocumentCustomMapper is not null) + { + return this._options.BsonDocumentCustomMapper; + } + + if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + return (new AzureCosmosDBMongoDBGenericDataModelMapper(this._propertyReader.RecordDefinition) as IVectorStoreRecordMapper)!; + } + + return new AzureCosmosDBMongoDBVectorStoreRecordMapper(this._propertyReader); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs new file mode 100644 index 000000000000..11b21a1e84e7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions.cs @@ -0,0 +1,42 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Options when creating a . 
+/// +public sealed class AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions where TRecord : class +{ + /// + /// Gets or sets an optional custom mapper to use when converting between the data model and the Azure CosmosDB MongoDB BSON object. + /// + public IVectorStoreRecordMapper? BsonDocumentCustomMapper { get; init; } = null; + + /// + /// Gets or sets an optional record definition that defines the schema of the record type. + /// + /// + /// If not provided, the schema will be inferred from the record model class using reflection. + /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. + /// See , and . + /// + public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// This integer is the number of clusters that the inverted file (IVF) index uses to group the vector data. Default is 1. + /// We recommend that numLists is set to documentCount/1000 for up to 1 million documents and to sqrt(documentCount) + /// for more than 1 million documents. Using a numLists value of 1 is akin to performing brute-force search, which has + /// limited performance. + /// + public int NumLists { get; set; } = 1; + + /// + /// The size of the dynamic candidate list for constructing the graph (64 by default, minimum value is 4, + /// maximum value is 1000). Higher ef_construction will result in better index quality and higher accuracy, but it will + /// also increase the time required to build the index. EfConstruction has to be at least 2 * m + /// + public int? 
EfConstruction { get; set; } = null; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordMapper.cs new file mode 100644 index 000000000000..4bf0a14d5d51 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordMapper.cs @@ -0,0 +1,67 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Reflection; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using MongoDB.Bson.Serialization; +using MongoDB.Bson.Serialization.Attributes; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +internal sealed class AzureCosmosDBMongoDBVectorStoreRecordMapper : IVectorStoreRecordMapper + where TRecord : class +{ + /// A key property info of the data model. + private readonly PropertyInfo _keyProperty; + + /// A key property name of the data model. + private readonly string _keyPropertyName; + + /// + /// Initializes a new instance of the class. + /// + /// A helper to access property information for the current data model and record definition. + public AzureCosmosDBMongoDBVectorStoreRecordMapper(VectorStoreRecordPropertyReader propertyReader) + { + propertyReader.VerifyKeyProperties(AzureCosmosDBMongoDBConstants.SupportedKeyTypes); + propertyReader.VerifyDataProperties(AzureCosmosDBMongoDBConstants.SupportedDataTypes, supportEnumerable: true); + propertyReader.VerifyVectorProperties(AzureCosmosDBMongoDBConstants.SupportedVectorTypes); + + this._keyPropertyName = propertyReader.KeyPropertyName; + this._keyProperty = propertyReader.KeyPropertyInfo; + } + + public BsonDocument MapFromDataToStorageModel(TRecord dataModel) + { + var document = dataModel.ToBsonDocument(); + + // Handle key property mapping due to reserved key name in Mongo. 
+ if (!document.Contains(AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName)) + { + var value = document[this._keyPropertyName]; + + document.Remove(this._keyPropertyName); + + document[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName] = value; + } + + return document; + } + + public TRecord MapFromStorageToDataModel(BsonDocument storageModel, StorageToDataModelMapperOptions options) + { + // Handle key property mapping due to reserved key name in Mongo. + if (!this._keyPropertyName.Equals(AzureCosmosDBMongoDBConstants.DataModelReservedKeyPropertyName, StringComparison.OrdinalIgnoreCase) && + this._keyProperty.GetCustomAttribute() is null) + { + var value = storageModel[AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName]; + + storageModel.Remove(AzureCosmosDBMongoDBConstants.MongoReservedKeyPropertyName); + + storageModel[this._keyPropertyName] = value; + } + + return BsonSerializer.Deserialize(storageModel); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj index 747709f993cc..9ce9d24d1aed 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/Connectors.Memory.AzureCosmosDBMongoDB.csproj @@ -23,6 +23,10 @@ + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs new file mode 100644 index 000000000000..39231a8bf7a8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBMongoDB/IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory.cs @@ -0,0 +1,25 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.SemanticKernel.Data; +using MongoDB.Driver; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; + +/// +/// Interface for constructing Azure CosmosDB MongoDB instances when using to retrieve these. +/// +public interface IAzureCosmosDBMongoDBVectorStoreRecordCollectionFactory +{ + /// + /// Constructs a new instance of the . + /// + /// The data type of the record key. + /// The data model to use for adding, updating and retrieving data from storage. + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + /// The name of the collection to connect to. + /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. + /// The new instance of . + IVectorStoreRecordCollection CreateVectorStoreRecordCollection(IMongoDatabase mongoDatabase, string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition) + where TKey : notnull + where TRecord : class; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLCompositeKey.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLCompositeKey.cs new file mode 100644 index 000000000000..24ec91e9ba12 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLCompositeKey.cs @@ -0,0 +1,19 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Composite key for Azure CosmosDB NoSQL record with record and partition keys. +/// +public sealed class AzureCosmosDBNoSQLCompositeKey(string recordKey, string partitionKey) +{ + /// + /// Value of record key. + /// + public string RecordKey { get; } = recordKey; + + /// + /// Value of partition key. 
+ /// + public string PartitionKey { get; } = partitionKey; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs new file mode 100644 index 000000000000..2c808307217f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLConstants.cs @@ -0,0 +1,9 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +internal static class AzureCosmosDBNoSQLConstants +{ + /// Reserved key property name in Azure CosmosDB NoSQL. + internal const string ReservedKeyPropertyName = "id"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs new file mode 100644 index 000000000000..87414ef05917 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLGenericDataModelMapper.cs @@ -0,0 +1,124 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Azure CosmosDB NoSQL. +/// +internal sealed class AzureCosmosDBNoSQLGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +{ + /// A default for serialization/deserialization of vector properties. 
+ private static readonly JsonSerializerOptions s_vectorJsonSerializerOptions = new() + { + Converters = { new AzureCosmosDBNoSQLReadOnlyMemoryByteConverter() } + }; + + /// A for serialization/deserialization of data properties + private readonly JsonSerializerOptions _jsonSerializerOptions; + + /// The list of properties from the record definition. + private readonly IReadOnlyList _properties; + + /// A dictionary that maps from a property name to the storage name. + public readonly Dictionary _storagePropertyNames; + + public AzureCosmosDBNoSQLGenericDataModelMapper( + IReadOnlyList properties, + Dictionary storagePropertyNames, + JsonSerializerOptions jsonSerializerOptions) + { + Verify.NotNull(properties); + + this._properties = properties; + this._storagePropertyNames = storagePropertyNames; + this._jsonSerializerOptions = jsonSerializerOptions; + } + + public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + Verify.NotNull(dataModel); + + var jsonObject = new JsonObject(); + + // Loop through all known properties and map each from the data model to the storage model. + foreach (var property in this._properties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + + if (property is VectorStoreRecordKeyProperty keyProperty) + { + jsonObject[AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName] = dataModel.Key; + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + if (dataModel.Data is not null && dataModel.Data.TryGetValue(dataProperty.DataModelPropertyName, out var dataValue)) + { + jsonObject[storagePropertyName] = dataValue is not null ? 
+ JsonSerializer.SerializeToNode(dataValue, property.PropertyType, this._jsonSerializerOptions) : + null; + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(vectorProperty.DataModelPropertyName, out var vectorValue)) + { + jsonObject[storagePropertyName] = vectorValue is not null ? + JsonSerializer.SerializeToNode(vectorValue, property.PropertyType, s_vectorJsonSerializerOptions) : + null; + } + } + } + + return jsonObject; + } + + public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + { + Verify.NotNull(storageModel); + + // Create variables to store the response properties. + string? key = null; + var dataProperties = new Dictionary(); + var vectorProperties = new Dictionary(); + + // Loop through all known properties and map each from the storage model to the data model. + foreach (var property in this._properties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + + if (property is VectorStoreRecordKeyProperty keyProperty) + { + if (storageModel.TryGetPropertyValue(AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, out var keyValue)) + { + key = keyValue?.GetValue(); + } + } + else if (property is VectorStoreRecordDataProperty dataProperty) + { + if (storageModel.TryGetPropertyValue(storagePropertyName, out var dataValue)) + { + dataProperties.Add(property.DataModelPropertyName, dataValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty && options.IncludeVectors) + { + if (storageModel.TryGetPropertyValue(storagePropertyName, out var vectorValue)) + { + vectorProperties.Add(property.DataModelPropertyName, vectorValue.Deserialize(property.PropertyType, s_vectorJsonSerializerOptions)); + } + } + } + + if (key is null) + { + throw new 
VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + } + + return new VectorStoreGenericDataModel(key) { Data = dataProperties, Vectors = vectorProperties }; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs new file mode 100644 index 000000000000..0f1e3744f36c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLKernelBuilderExtensions.cs @@ -0,0 +1,56 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Azure CosmosDB NoSQL instances on the . +/// +public static class AzureCosmosDBNoSQLKernelBuilderExtensions +{ + /// + /// Register an Azure CosmosDB NoSQL with the specified service ID + /// and where the Azure CosmosDB NoSQL is retrieved from the dependency injection container. + /// + /// The builder to register the on. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// The kernel builder. + public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStore( + this IKernelBuilder builder, + AzureCosmosDBNoSQLVectorStoreOptions? options = default, + string? serviceId = default) + { + builder.Services.AddAzureCosmosDBNoSQLVectorStore(options, serviceId); + return builder; + } + + /// + /// Register an Azure CosmosDB NoSQL with the specified service ID + /// and where the Azure CosmosDB NoSQL is constructed using the provided and . + /// + /// The builder to register the on. + /// Connection string required to connect to Azure CosmosDB NoSQL. + /// Database name for Azure CosmosDB NoSQL. 
+ /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// The kernel builder. + public static IKernelBuilder AddAzureCosmosDBNoSQLVectorStore( + this IKernelBuilder builder, + string connectionString, + string databaseName, + AzureCosmosDBNoSQLVectorStoreOptions? options = default, + string? serviceId = default) + { + builder.Services.AddAzureCosmosDBNoSQLVectorStore( + connectionString, + databaseName, + options, + serviceId); + + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs index d9d5b67ee4af..e526d55eac9c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs @@ -53,7 +53,7 @@ public AzureCosmosDBNoSQLMemoryStore( new CosmosClientOptions { ApplicationName = applicationName ?? HttpHeaderConstant.Values.UserAgent, - Serializer = new CosmosSystemTextJsonSerializer(JsonSerializerOptions.Default), + UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default, }), databaseName, new VectorEmbeddingPolicy( @@ -100,7 +100,7 @@ public AzureCosmosDBNoSQLMemoryStore( new CosmosClientOptions { ApplicationName = applicationName ?? 
HttpHeaderConstant.Values.UserAgent, - Serializer = new CosmosSystemTextJsonSerializer(JsonSerializerOptions.Default), + UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default, }), databaseName, vectorEmbeddingPolicy, diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLReadOnlyMemoryByteConverter.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLReadOnlyMemoryByteConverter.cs new file mode 100644 index 000000000000..951ad3b21fbc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLReadOnlyMemoryByteConverter.cs @@ -0,0 +1,54 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Contains serialization and deserialization logic for of +/// to avoid default base64 encoding/decoding. 
+/// +internal sealed class AzureCosmosDBNoSQLReadOnlyMemoryByteConverter : JsonConverter> +{ + public override ReadOnlyMemory Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType != JsonTokenType.StartArray) + { + throw new JsonException("Expected StartArray token when deserializing ReadOnlyMemory."); + } + + var byteList = new List(); + + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndArray) + { + break; + } + + if (reader.TokenType != JsonTokenType.Number) + { + throw new JsonException("Expected byte values in the array."); + } + + byteList.Add(reader.GetByte()); + } + + return new ReadOnlyMemory(byteList.ToArray()); + } + + public override void Write(Utf8JsonWriter writer, ReadOnlyMemory value, JsonSerializerOptions options) + { + writer.WriteStartArray(); + + foreach (var b in value.ToArray()) + { + writer.WriteNumberValue(b); + } + + writer.WriteEndArray(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs new file mode 100644 index 000000000000..51f8cb41b333 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLServiceCollectionExtensions.cs @@ -0,0 +1,81 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Azure CosmosDB NoSQL instances on an . 
+/// +public static class AzureCosmosDBNoSQLServiceCollectionExtensions +{ + /// + /// Register an Azure CosmosDB NoSQL with the specified service ID + /// and where the Azure CosmosDB NoSQL is retrieved from the dependency injection container. + /// + /// The to register the on. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// Service collection. + public static IServiceCollection AddAzureCosmosDBNoSQLVectorStore( + this IServiceCollection services, + AzureCosmosDBNoSQLVectorStoreOptions? options = default, + string? serviceId = default) + { + // If we are not constructing Database, add the IVectorStore as transient, since we + // cannot make assumptions about how Database is being managed. + services.AddKeyedTransient( + serviceId, + (sp, obj) => + { + var database = sp.GetRequiredService(); + var selectedOptions = options ?? sp.GetService(); + + return new AzureCosmosDBNoSQLVectorStore(database, options); + }); + + return services; + } + + /// + /// Register an Azure CosmosDB NoSQL with the specified service ID + /// and where the Azure CosmosDB NoSQL is constructed using the provided and . + /// + /// The to register the on. + /// Connection string required to connect to Azure CosmosDB NoSQL. + /// Database name for Azure CosmosDB NoSQL. + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// Service collection. + public static IServiceCollection AddAzureCosmosDBNoSQLVectorStore( + this IServiceCollection services, + string connectionString, + string databaseName, + AzureCosmosDBNoSQLVectorStoreOptions? options = default, + string? serviceId = default) + { + // If we are constructing Database, add the IVectorStore as singleton. 
+ services.AddKeyedSingleton( + serviceId, + (sp, obj) => + { + var cosmosClient = new CosmosClient(connectionString, new() + { + ApplicationName = HttpHeaderConstant.Values.UserAgent, + UseSystemTextJsonSerializerWithOptions = options?.JsonSerializerOptions ?? JsonSerializerOptions.Default, + }); + + var database = cosmosClient.GetDatabase(databaseName); + var selectedOptions = options ?? sp.GetService(); + + return new AzureCosmosDBNoSQLVectorStore(database, options); + }); + + return services; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs new file mode 100644 index 000000000000..ea1ec083a484 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStore.cs @@ -0,0 +1,86 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Class for accessing the list of collections in a Azure CosmosDB NoSQL vector store. +/// +/// +/// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. +/// +public sealed class AzureCosmosDBNoSQLVectorStore : IVectorStore +{ + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + private readonly Database _database; + + /// Optional configuration options for this class. + private readonly AzureCosmosDBNoSQLVectorStoreOptions _options; + + /// + /// Initializes a new instance of the class. + /// + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + /// Optional configuration options for this class. 
+ public AzureCosmosDBNoSQLVectorStore(Database database, AzureCosmosDBNoSQLVectorStoreOptions? options = null) + { + Verify.NotNull(database); + + this._database = database; + this._options = options ?? new(); + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? vectorStoreRecordDefinition = null) + where TKey : notnull + where TRecord : class + { + if (typeof(TKey) != typeof(string) && typeof(TKey) != typeof(AzureCosmosDBNoSQLCompositeKey)) + { + throw new NotSupportedException($"Only {nameof(String)} and {nameof(AzureCosmosDBNoSQLCompositeKey)} keys are supported."); + } + + if (this._options.VectorStoreCollectionFactory is not null) + { + return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection( + this._database, + name, + vectorStoreRecordDefinition); + } + + var recordCollection = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + this._database, + name, + new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + JsonSerializerOptions = this._options.JsonSerializerOptions + }) as IVectorStoreRecordCollection; + + return recordCollection!; + } + + /// + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + const string Query = "SELECT VALUE(c.id) FROM c"; + + using var feedIterator = this._database.GetContainerQueryIterator(Query); + + while (feedIterator.HasMoreResults) + { + var next = await feedIterator.ReadNextAsync(cancellationToken).ConfigureAwait(false); + + foreach (var containerName in next.Resource) + { + yield return containerName; + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs new file mode 100644 index 000000000000..d6f1bef56e0b --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreOptions.cs @@ -0,0 +1,21 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Options when creating a . +/// +public sealed class AzureCosmosDBNoSQLVectorStoreOptions +{ + /// + /// An optional factory to use for constructing instances, if a custom record collection is required. + /// + public IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } + + /// + /// Gets or sets the JSON serializer options to use when converting between the data model and the Azure CosmosDB NoSQL record. + /// + public JsonSerializerOptions? JsonSerializerOptions { get; init; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..a7eb9e465b41 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollection.cs @@ -0,0 +1,678 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Data; +using DistanceFunction = Microsoft.Azure.Cosmos.DistanceFunction; +using IndexKind = Microsoft.SemanticKernel.Data.IndexKind; +using SKDistanceFunction = Microsoft.SemanticKernel.Data.DistanceFunction; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Service for storing and retrieving vector records, that uses Azure CosmosDB NoSQL as the underlying storage. +/// +/// The data model to use for adding, updating and retrieving data from storage. +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollection : + IVectorStoreRecordCollection, + IVectorStoreRecordCollection + where TRecord : class +#pragma warning restore CA1711 // Identifiers should not have incorrect +{ + /// The name of this database for telemetry purposes. + private const string DatabaseName = "AzureCosmosDBNoSQL"; + + /// A of types that a key on the provided model may have. + private static readonly HashSet s_supportedKeyTypes = + [ + typeof(string) + ]; + + /// A of types that data properties on the provided model may have. + private static readonly HashSet s_supportedDataTypes = + [ + typeof(bool), + typeof(bool?), + typeof(string), + typeof(int), + typeof(int?), + typeof(long), + typeof(long?), + typeof(float), + typeof(float?), + typeof(double), + typeof(double?), + typeof(DateTimeOffset), + typeof(DateTimeOffset?), + ]; + + /// A of types that vector properties on the provided model may have, based on enumeration. 
+ private static readonly HashSet s_supportedVectorTypes = + [ + // Float16 +#if NET5_0_OR_GREATER + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), +#endif + // Float32 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + // Uint8 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + // Int8 + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + ]; + + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + private readonly Database _database; + + /// Optional configuration options for this class. + private readonly AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions _options; + + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; + + /// The storage names of all non vector fields on the current model. + private readonly List _nonVectorStoragePropertyNames = []; + + /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. + private readonly Dictionary _storagePropertyNames = []; + + /// The storage name of the key field for the collections that this class is used with. + private readonly string _keyStoragePropertyName; + + /// The property name to use as partition key. + private readonly string _partitionKeyPropertyName; + + /// The storage property name to use as partition key. + private readonly string _partitionKeyStoragePropertyName; + + /// The mapper to use when mapping between the consumer data model and the Azure CosmosDB NoSQL record. + private readonly IVectorStoreRecordMapper _mapper; + + /// + public string CollectionName { get; } + + /// + /// Initializes a new instance of the class. + /// + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + /// The name of the collection that this will access. + /// Optional configuration options for this class. 
+ public AzureCosmosDBNoSQLVectorStoreRecordCollection( + Database database, + string collectionName, + AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions? options = default) + { + // Verify. + Verify.NotNull(database); + Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonObjectCustomMapper is not null, s_supportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); + + // Assign. + this._database = database; + this.CollectionName = collectionName; + this._options = options ?? new(); + var jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; + this._propertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + JsonSerializerOptions = jsonSerializerOptions + }); + + // Validate property types. + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + + // Get storage names and store for later use. + this._storagePropertyNames = this._propertyReader.JsonPropertyNamesMap.ToDictionary(x => x.Key, x => x.Value); + + // Assign mapper. + this._mapper = this.InitializeMapper(jsonSerializerOptions); + + // Use Azure CosmosDB NoSQL reserved key property name as storage key property name. + this._storagePropertyNames[this._propertyReader.KeyPropertyName] = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName; + this._keyStoragePropertyName = AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName; + + // If partition key is not provided, use key property as a partition key. 
+ this._partitionKeyPropertyName = !string.IsNullOrWhiteSpace(this._options.PartitionKeyPropertyName) ? + this._options.PartitionKeyPropertyName! : + this._propertyReader.KeyPropertyName; + + VerifyPartitionKeyProperty(this._partitionKeyPropertyName, this._propertyReader.Properties); + + this._partitionKeyStoragePropertyName = this._storagePropertyNames[this._partitionKeyPropertyName]; + + this._nonVectorStoragePropertyNames = this._propertyReader.DataProperties + .Cast() + .Concat([this._propertyReader.KeyProperty]) + .Select(x => this._storagePropertyNames[x.DataModelPropertyName]) + .ToList(); + } + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + return this.RunOperationAsync("GetContainerQueryIterator", async () => + { + const string Query = "SELECT VALUE(c.id) FROM c WHERE c.id = @collectionName"; + + var queryDefinition = new QueryDefinition(Query).WithParameter("@collectionName", this.CollectionName); + + using var feedIterator = this._database.GetContainerQueryIterator(queryDefinition); + + while (feedIterator.HasMoreResults) + { + var next = await feedIterator.ReadNextAsync(cancellationToken).ConfigureAwait(false); + + foreach (var containerName in next.Resource) + { + return true; + } + } + + return false; + }); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + return this.RunOperationAsync("CreateContainer", () => + this._database.CreateContainerAsync(this.GetContainerProperties(), cancellationToken: cancellationToken)); + } + + /// + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) + { + await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + } + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + return this.RunOperationAsync("DeleteContainer", () => + 
this._database + .GetContainer(this.CollectionName) + .DeleteContainerAsync(cancellationToken: cancellationToken)); + } + + #region Implementation of IVectorStoreRecordCollection + + /// + public Task DeleteAsync(string key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + // Use record key as partition key + var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); + + return this.InternalDeleteAsync([compositeKey], cancellationToken); + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + // Use record keys as partition keys + var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); + + return this.InternalDeleteAsync(compositeKeys, cancellationToken); + } + + /// + public async Task GetAsync(string key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + // Use record key as partition key + var compositeKey = new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key); + + return await this.InternalGetAsync([compositeKey], options, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + IEnumerable keys, + GetRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + // Use record keys as partition keys + var compositeKeys = keys.Select(key => new AzureCosmosDBNoSQLCompositeKey(recordKey: key, partitionKey: key)); + + await foreach (var record in this.InternalGetAsync(compositeKeys, options, cancellationToken).ConfigureAwait(false)) + { + if (record is not null) + { + yield return record; + } + } + } + + /// + public async Task UpsertAsync(TRecord record, UpsertRecordOptions? 
options = null, CancellationToken cancellationToken = default) + { + var key = await this.InternalUpsertAsync(record, cancellationToken).ConfigureAwait(false); + + return key.RecordKey; + } + + /// + public async IAsyncEnumerable UpsertBatchAsync( + IEnumerable records, + UpsertRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(records); + + var tasks = records.Select(record => this.InternalUpsertAsync(record, cancellationToken)); + + var keys = await Task.WhenAll(tasks).ConfigureAwait(false); + + foreach (var key in keys) + { + if (key is not null) + { + yield return key.RecordKey; + } + } + } + + #endregion + + #region Implementation of IVectorStoreRecordCollection + + /// + public async Task GetAsync(AzureCosmosDBNoSQLCompositeKey key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return await this.InternalGetAsync([key], options, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + IEnumerable keys, + GetRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await foreach (var record in this.InternalGetAsync(keys, options, cancellationToken).ConfigureAwait(false)) + { + if (record is not null) + { + yield return record; + } + } + } + + /// + public Task DeleteAsync(AzureCosmosDBNoSQLCompositeKey key, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return this.InternalDeleteAsync([key], cancellationToken); + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return this.InternalDeleteAsync(keys, cancellationToken); + } + + /// + Task IVectorStoreRecordCollection.UpsertAsync(TRecord record, UpsertRecordOptions? 
options, CancellationToken cancellationToken) + { + return this.InternalUpsertAsync(record, cancellationToken); + } + + /// + async IAsyncEnumerable IVectorStoreRecordCollection.UpsertBatchAsync( + IEnumerable records, + UpsertRecordOptions? options, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + Verify.NotNull(records); + + var tasks = records.Select(record => this.InternalUpsertAsync(record, cancellationToken)); + + var keys = await Task.WhenAll(tasks).ConfigureAwait(false); + + foreach (var key in keys) + { + if (key is not null) + { + yield return key; + } + } + } + + #endregion + + #region private + + private async Task RunOperationAsync(string operationName, Func> operation) + { + try + { + return await operation.Invoke().ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException("Call to vector store failed.", ex) + { + VectorStoreType = DatabaseName, + CollectionName = this.CollectionName, + OperationName = operationName + }; + } + } + + private static void VerifyPartitionKeyProperty(string partitionKeyPropertyName, IReadOnlyList properties) + { + var partitionKeyProperty = properties + .FirstOrDefault(l => l.DataModelPropertyName.Equals(partitionKeyPropertyName, StringComparison.Ordinal)); + + if (partitionKeyProperty is null) + { + throw new ArgumentException("Partition key property must be part of record definition."); + } + + if (partitionKeyProperty.PropertyType != typeof(string)) + { + throw new ArgumentException("Partition key property must be string."); + } + } + + /// + /// Returns instance of with applied indexing policy. + /// More information here: . + /// + private ContainerProperties GetContainerProperties() + { + // Process Vector properties. 
+ var embeddings = new Collection(); + var vectorIndexPaths = new Collection(); + + foreach (var property in this._propertyReader.VectorProperties) + { + var vectorPropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + + if (property.Dimensions is not > 0) + { + throw new VectorStoreOperationException($"Property {nameof(property.Dimensions)} on {nameof(VectorStoreRecordVectorProperty)} '{property.DataModelPropertyName}' must be set to a positive integer to create a collection."); + } + + var path = $"/{vectorPropertyName}"; + + var embedding = new Embedding + { + DataType = GetDataType(property.PropertyType, vectorPropertyName), + Dimensions = (ulong)property.Dimensions, + DistanceFunction = GetDistanceFunction(property.DistanceFunction, vectorPropertyName), + Path = path + }; + + var vectorIndexPath = new VectorIndexPath + { + Type = GetIndexKind(property.IndexKind, vectorPropertyName), + Path = path + }; + + embeddings.Add(embedding); + vectorIndexPaths.Add(vectorIndexPath); + } + + var vectorEmbeddingPolicy = new VectorEmbeddingPolicy(embeddings); + var indexingPolicy = new IndexingPolicy + { + VectorIndexes = vectorIndexPaths, + IndexingMode = this._options.IndexingMode, + Automatic = this._options.Automatic + }; + + if (indexingPolicy.IndexingMode != IndexingMode.None) + { + // Process Data properties. + foreach (var property in this._propertyReader.DataProperties) + { + if (property.IsFilterable || property.IsFullTextSearchable) + { + indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = $"/{this._storagePropertyNames[property.DataModelPropertyName]}/?" }); + } + } + + // Adding special mandatory indexing path. + indexingPolicy.IncludedPaths.Add(new IncludedPath { Path = "/" }); + + // Exclude vector paths to ensure optimized performance. 
+ foreach (var vectorIndexPath in vectorIndexPaths) + { + indexingPolicy.ExcludedPaths.Add(new ExcludedPath { Path = $"{vectorIndexPath.Path}/*" }); + } + } + + return new ContainerProperties(this.CollectionName, partitionKeyPath: $"/{this._partitionKeyStoragePropertyName}") + { + VectorEmbeddingPolicy = vectorEmbeddingPolicy, + IndexingPolicy = indexingPolicy + }; + } + + /// + /// More information about Azure CosmosDB NoSQL index kinds here: . + /// + private static VectorIndexType GetIndexKind(string? indexKind, string vectorPropertyName) + { + return indexKind switch + { + IndexKind.Flat => VectorIndexType.Flat, + IndexKind.QuantizedFlat => VectorIndexType.QuantizedFlat, + IndexKind.DiskAnn => VectorIndexType.DiskANN, + _ => throw new InvalidOperationException($"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") + }; + } + + /// + /// More information about Azure CosmosDB NoSQL distance functions here: . + /// + private static DistanceFunction GetDistanceFunction(string? distanceFunction, string vectorPropertyName) + { + return distanceFunction switch + { + SKDistanceFunction.CosineSimilarity => DistanceFunction.Cosine, + SKDistanceFunction.DotProductSimilarity => DistanceFunction.DotProduct, + SKDistanceFunction.EuclideanDistance => DistanceFunction.Euclidean, + _ => throw new InvalidOperationException($"Distance function '{distanceFunction}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") + }; + } + + /// + /// Returns based on vector property type. + /// + private static VectorDataType GetDataType(Type vectorDataType, string vectorPropertyName) + { + return vectorDataType switch + { +#if NET5_0_OR_GREATER + Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) 
=> VectorDataType.Float16, +#endif + Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Float32, + Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Uint8, + Type type when type == typeof(ReadOnlyMemory) || type == typeof(ReadOnlyMemory?) => VectorDataType.Int8, + _ => throw new InvalidOperationException($"Data type '{vectorDataType}' for {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Azure CosmosDB NoSQL VectorStore.") + }; + } + + private async IAsyncEnumerable InternalGetAsync( + IEnumerable keys, + GetRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(keys); + + const string OperationName = "GetItemQueryIterator"; + + var includeVectors = options?.IncludeVectors ?? false; + var fields = new List(includeVectors ? this._storagePropertyNames.Values : this._nonVectorStoragePropertyNames); + var queryDefinition = this.GetSelectQuery(keys.ToList(), fields); + + await foreach (var jsonObject in this.GetItemsAsync(queryDefinition, cancellationToken).ConfigureAwait(false)) + { + yield return VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromStorageToDataModel(jsonObject, new() { IncludeVectors = includeVectors })); + } + } + + private async Task InternalUpsertAsync( + TRecord record, + CancellationToken cancellationToken) + { + Verify.NotNull(record); + + const string OperationName = "UpsertItem"; + + var jsonObject = VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record)); + + var keyValue = jsonObject.TryGetPropertyValue(this._keyStoragePropertyName, out var jsonKey) ? 
jsonKey?.ToString() : null; + var partitionKeyValue = jsonObject.TryGetPropertyValue(this._partitionKeyStoragePropertyName, out var jsonPartitionKey) ? jsonPartitionKey?.ToString() : null; + + if (string.IsNullOrWhiteSpace(keyValue)) + { + throw new VectorStoreOperationException($"Key property {this._propertyReader.KeyPropertyName} is not initialized."); + } + + if (string.IsNullOrWhiteSpace(partitionKeyValue)) + { + throw new VectorStoreOperationException($"Partition key property {this._partitionKeyPropertyName} is not initialized."); + } + + await this.RunOperationAsync(OperationName, () => + this._database + .GetContainer(this.CollectionName) + .UpsertItemAsync(jsonObject, new PartitionKey(partitionKeyValue), cancellationToken: cancellationToken)) + .ConfigureAwait(false); + + return new AzureCosmosDBNoSQLCompositeKey(keyValue!, partitionKeyValue!); + } + + private async Task InternalDeleteAsync(IEnumerable keys, CancellationToken cancellationToken) + { + Verify.NotNull(keys); + + var tasks = keys.Select(key => + { + Verify.NotNullOrWhiteSpace(key.RecordKey); + Verify.NotNullOrWhiteSpace(key.PartitionKey); + + return this.RunOperationAsync("DeleteItem", () => + this._database + .GetContainer(this.CollectionName) + .DeleteItemAsync(key.RecordKey, new PartitionKey(key.PartitionKey), cancellationToken: cancellationToken)); + }); + + await Task.WhenAll(tasks).ConfigureAwait(false); + } + + private QueryDefinition GetSelectQuery(List keys, List fields) + { + Verify.True(keys.Count > 0, "At least one key should be provided.", nameof(keys)); + + const string WhereClauseDelimiter = " OR "; + const string SelectClauseDelimiter = ","; + + const string RecordKeyVariableName = "rk"; + const string PartitionKeyVariableName = "pk"; + + const string TableVariableName = "x"; + + var selectClauseArguments = string.Join(SelectClauseDelimiter, + fields.Select(field => $"{TableVariableName}.{field}")); + + var whereClauseArguments = string.Join(WhereClauseDelimiter, + 
keys.Select((key, index) => + $"({TableVariableName}.{this._keyStoragePropertyName} = @{RecordKeyVariableName}{index} AND " + + $"{TableVariableName}.{this._partitionKeyStoragePropertyName} = @{PartitionKeyVariableName}{index})")); + + var query = $"SELECT {selectClauseArguments} FROM {TableVariableName} WHERE {whereClauseArguments}"; + + var queryDefinition = new QueryDefinition(query); + + for (var i = 0; i < keys.Count; i++) + { + var recordKey = keys[i].RecordKey; + var partitionKey = keys[i].PartitionKey; + + Verify.NotNullOrWhiteSpace(recordKey); + Verify.NotNullOrWhiteSpace(partitionKey); + + queryDefinition.WithParameter($"@{RecordKeyVariableName}{i}", recordKey); + queryDefinition.WithParameter($"@{PartitionKeyVariableName}{i}", partitionKey); + } + + return queryDefinition; + } + + private async IAsyncEnumerable GetItemsAsync(QueryDefinition queryDefinition, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var iterator = this._database + .GetContainer(this.CollectionName) + .GetItemQueryIterator(queryDefinition); + + while (iterator.HasMoreResults) + { + var response = await iterator.ReadNextAsync(cancellationToken).ConfigureAwait(false); + + foreach (var record in response.Resource) + { + if (record is not null) + { + yield return record; + } + } + } + } + + /// + /// Returns custom mapper, generic data model mapper or default record mapper. 
+ /// + private IVectorStoreRecordMapper InitializeMapper(JsonSerializerOptions jsonSerializerOptions) + { + if (this._options.JsonObjectCustomMapper is not null) + { + return this._options.JsonObjectCustomMapper; + } + + if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + var mapper = new AzureCosmosDBNoSQLGenericDataModelMapper(this._propertyReader.Properties, this._storagePropertyNames, jsonSerializerOptions); + return (mapper as IVectorStoreRecordMapper)!; + } + + return new AzureCosmosDBNoSQLVectorStoreRecordMapper( + this._storagePropertyNames[this._propertyReader.KeyPropertyName], + this._storagePropertyNames, + jsonSerializerOptions); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs new file mode 100644 index 000000000000..a42d821a7d30 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions.cs @@ -0,0 +1,59 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Options when creating a . +/// +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions where TRecord : class +{ + /// + /// Gets or sets an optional custom mapper to use when converting between the data model and the Azure CosmosDB NoSQL record. + /// + /// + /// If not set, the default mapper that is provided by the Azure CosmosDB NoSQL client SDK will be used. + /// + public IVectorStoreRecordMapper? JsonObjectCustomMapper { get; init; } = null; + + /// + /// Gets or sets an optional record definition that defines the schema of the record type. 
+ /// + /// + /// If not provided, the schema will be inferred from the record model class using reflection. + /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. + /// See , and . + /// + public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// Gets or sets the JSON serializer options to use when converting between the data model and the Azure CosmosDB NoSQL record. + /// + public JsonSerializerOptions? JsonSerializerOptions { get; init; } = null; + + /// + /// The property name to use as partition key. + /// + public string? PartitionKeyPropertyName { get; init; } = null; + + /// + /// Specifies the indexing mode in the Azure Cosmos DB service. + /// More information here: . + /// + /// + /// Default is . + /// + public IndexingMode IndexingMode { get; init; } = IndexingMode.Consistent; + + /// + /// Gets or sets a value that indicates whether automatic indexing is enabled for a collection in the Azure Cosmos DB service. + /// + /// + /// Default is . + /// + public bool Automatic { get; init; } = true; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs new file mode 100644 index 000000000000..ced34a5b2693 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordMapper.cs @@ -0,0 +1,71 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Class for mapping between a json node stored in Azure CosmosDB NoSQL and the consumer data model. +/// +/// The consumer data model to map to or from. 
+internal sealed class AzureCosmosDBNoSQLVectorStoreRecordMapper : IVectorStoreRecordMapper + where TRecord : class +{ + /// The JSON serializer options to use when converting between the data model and the Azure CosmosDB NoSQL record. + private readonly JsonSerializerOptions _jsonSerializerOptions; + + /// The storage property name of the key field of consumer data model. + private readonly string _keyStoragePropertyName; + + /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. + private readonly Dictionary _storagePropertyNames = []; + + public AzureCosmosDBNoSQLVectorStoreRecordMapper( + string keyStoragePropertyName, + Dictionary storagePropertyNames, + JsonSerializerOptions jsonSerializerOptions) + { + Verify.NotNull(jsonSerializerOptions); + + this._keyStoragePropertyName = keyStoragePropertyName; + this._storagePropertyNames = storagePropertyNames; + this._jsonSerializerOptions = jsonSerializerOptions; + } + + public JsonObject MapFromDataToStorageModel(TRecord dataModel) + { + var jsonObject = JsonSerializer.SerializeToNode(dataModel, this._jsonSerializerOptions)!.AsObject(); + + // Key property in Azure CosmosDB NoSQL has a reserved name. + RenameJsonProperty(jsonObject, this._keyStoragePropertyName, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName); + + return jsonObject; + } + + public TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + { + // Rename key property for valid deserialization. + RenameJsonProperty(storageModel, AzureCosmosDBNoSQLConstants.ReservedKeyPropertyName, this._keyStoragePropertyName); + + return storageModel.Deserialize(this._jsonSerializerOptions)!; + } + + #region private + + private static void RenameJsonProperty(JsonObject jsonObject, string oldKey, string newKey) + { + if (jsonObject is not null && jsonObject.ContainsKey(oldKey)) + { + JsonNode? 
value = jsonObject[oldKey]; + + jsonObject.Remove(oldKey); + + jsonObject[newKey] = value; + } + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj index 0ffb5b602e05..32fd24223c81 100644 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/Connectors.Memory.AzureCosmosDBNoSQL.csproj @@ -27,4 +27,8 @@ + + + + diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/CosmosSystemTextJSonSerializer.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/CosmosSystemTextJSonSerializer.cs deleted file mode 100644 index 0737ce09c120..000000000000 --- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/CosmosSystemTextJSonSerializer.cs +++ /dev/null @@ -1,130 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -// Taken from https://github.com/Azure/azure-cosmos-dotnet-v3/pull/4332 - -using System; -using System.Diagnostics.CodeAnalysis; -using System.IO; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.Azure.Cosmos; - -/// -/// This class provides a default implementation of System.Text.Json Cosmos Linq Serializer. -/// -internal sealed class CosmosSystemTextJsonSerializer : CosmosLinqSerializer -{ - /// - /// A read-only instance of . - /// - private readonly JsonSerializerOptions _jsonSerializerOptions; - - /// - /// Creates an instance of - /// with the default values for the Cosmos SDK - /// - /// An instance of containing the json serialization options. 
- public CosmosSystemTextJsonSerializer( - JsonSerializerOptions jsonSerializerOptions) - { - this._jsonSerializerOptions = jsonSerializerOptions; - } - - /// - [return: MaybeNull] - public override T FromStream(Stream stream) - { - if (stream == null) - { - throw new ArgumentNullException(nameof(stream)); - } - - if (stream.CanSeek && stream.Length == 0) - { - return default; - } - - if (typeof(Stream).IsAssignableFrom(typeof(T))) - { - return (T)(object)stream; - } - - using (stream) - { - return JsonSerializer.Deserialize(stream, this._jsonSerializerOptions); - } - } - - /// - public override Stream ToStream(T input) - { - MemoryStream streamPayload = new(); - JsonSerializer.Serialize( - utf8Json: streamPayload, - value: input, - options: this._jsonSerializerOptions); - - streamPayload.Position = 0; - return streamPayload; - } - - /// - /// Convert a MemberInfo to a string for use in LINQ query translation. - /// - /// Any MemberInfo used in the query. - /// A serialized representation of the member. - /// - /// Note that this is just a default implementation which handles the basic scenarios. Any passed in - /// here are not going to be reflected in SerializeMemberName(). For example, if customers passed in a JsonSerializerOption such as below - /// - /// - /// - /// This would not be honored by SerializeMemberName() unless it included special handling for this, for example. 
- /// - /// (true); - /// if (jsonExtensionDataAttribute != null) - /// { - /// return null; - /// } - /// JsonPropertyNameAttribute jsonPropertyNameAttribute = memberInfo.GetCustomAttribute(true); - /// if (!string.IsNullOrEmpty(jsonPropertyNameAttribute?.Name)) - /// { - /// return jsonPropertyNameAttribute.Name; - /// } - /// return System.Text.Json.JsonNamingPolicy.CamelCase.ConvertName(memberInfo.Name); - /// } - /// ]]> - /// - /// To handle such scenarios, please create a custom serializer which inherits from the and overrides the - /// SerializeMemberName to add any special handling. - /// - public override string? SerializeMemberName(MemberInfo memberInfo) - { - JsonExtensionDataAttribute? jsonExtensionDataAttribute = - memberInfo.GetCustomAttribute(true); - - if (jsonExtensionDataAttribute != null) - { - return null; - } - - JsonPropertyNameAttribute? jsonPropertyNameAttribute = memberInfo.GetCustomAttribute(true); - if (jsonPropertyNameAttribute is { } && !string.IsNullOrEmpty(jsonPropertyNameAttribute.Name)) - { - return jsonPropertyNameAttribute.Name; - } - - return memberInfo.Name; - } -} diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs new file mode 100644 index 000000000000..26e33af811dc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory.cs @@ -0,0 +1,28 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; + +/// +/// Interface for constructing Azure CosmosDB NoSQL instances when using to retrieve these. +/// +public interface IAzureCosmosDBNoSQLVectorStoreRecordCollectionFactory +{ + /// + /// Constructs a new instance of the . 
+ /// + /// The data type of the record key. + /// The data model to use for adding, updating and retrieving data from storage. + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + /// The name of the collection to connect to. + /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. + /// The new instance of . + IVectorStoreRecordCollection CreateVectorStoreRecordCollection( + Database database, + string name, + VectorStoreRecordDefinition? vectorStoreRecordDefinition) + where TKey : notnull + where TRecord : class; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs index a4f422d5e7d9..7eaf1201abc3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs +++ b/dotnet/src/Connectors/Connectors.Memory.DuckDB/Database.cs @@ -118,7 +118,7 @@ public async IAsyncEnumerable GetNearestMatchesAsync( using var cmd = conn.CreateCommand(); cmd.CommandText = $@" - SELECT key, metadata, timestamp, embedding, (embedding <=> {embeddingArrayString}) as score FROM {TableName} + SELECT key, metadata, timestamp, embedding, list_cosine_similarity(embedding, {embeddingArrayString}) as score FROM {TableName} WHERE collection=${nameof(collectionName)} AND len(embedding) > 0 AND score >= {minRelevanceScore.ToString("F12", CultureInfo.InvariantCulture)} ORDER BY score DESC LIMIT ${nameof(limit)};"; diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs index 294544ea9e64..b350a530a455 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryRecord.cs @@ -1,10 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Text.Json.Serialization; using Kusto.Cloud.Platform.Utils; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Kusto; @@ -26,7 +24,6 @@ public sealed class KustoMemoryRecord /// /// Source content embedding. /// - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Embedding { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs index dab4955787a3..d70b8ba6a603 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/Query.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -30,7 +29,6 @@ public sealed class Query /// /// Vector dense data. This should be the same length as the dimension of the index being queried. /// - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Vector { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs index 811ab0b58daf..7d7b597c3faa 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/Model/SparseVectorData.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Pinecone; @@ -24,7 +23,6 @@ public class SparseVectorData /// /// The corresponding values of the sparse data, which must be the same length as the indices. 
[JsonPropertyName("values")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Values { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs index 1e6e546d6507..6dc0e72c66a2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeDocument.cs @@ -25,7 +25,6 @@ public class PineconeDocument /// Vector dense data. This should be the same length as the dimension of the index being queried. /// [JsonPropertyName("values")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Values { get; set; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs index 7a6fc9767f62..eb8caaa5a17d 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreOptions.cs @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Pinecone; public sealed class PineconeVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if custom options are required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// public IPineconeVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs index 323681f629be..521576c8a12c 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordCollection.cs @@ -33,7 +33,7 @@ public sealed class PineconeVectorStoreRecordCollection : IVectorStoreR private readonly Sdk.PineconeClient _pineconeClient; private readonly PineconeVectorStoreRecordCollectionOptions _options; - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + private readonly VectorStoreRecordPropertyReader _propertyReader; private readonly IVectorStoreRecordMapper _mapper; private Sdk.Index? _index; @@ -56,11 +56,19 @@ public PineconeVectorStoreRecordCollection(Sdk.PineconeClient pineconeClient, st this._pineconeClient = pineconeClient; this.CollectionName = collectionName; this._options = options ?? new PineconeVectorStoreRecordCollectionOptions(); - this._vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? 
VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = true, + SupportsMultipleKeys = false, + SupportsMultipleVectors = false, + }); if (this._options.VectorCustomMapper is null) { - this._mapper = new PineconeVectorStoreRecordMapper(this._vectorStoreRecordDefinition); + this._mapper = new PineconeVectorStoreRecordMapper(this._propertyReader); } else { @@ -87,7 +95,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke public async Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // we already run through record property validation, so a single VectorStoreRecordVectorProperty is guaranteed. - var vectorProperty = this._vectorStoreRecordDefinition.Properties.OfType().First(); + var vectorProperty = this._propertyReader.VectorProperty!; var (dimension, metric) = PineconeVectorStoreCollectionCreateMapping.MapServerlessIndex(vectorProperty); await this.RunOperationAsync( diff --git a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs index da1d95ad6de9..eadbfb718b7b 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Pinecone/PineconeVectorStoreRecordMapper.cs @@ -3,9 +3,6 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Nodes; using Microsoft.SemanticKernel.Data; using Pinecone; @@ -52,53 +49,38 @@ internal sealed class PineconeVectorStoreRecordMapper : IVectorStoreRec typeof(ReadOnlyMemory?), ]; - private readonly PropertyInfo _keyPropertyInfo; - - private readonly List _dataPropertiesInfo; - - private 
readonly PropertyInfo _vectorPropertyInfo; - - private readonly Dictionary _storagePropertyNames = []; - - private readonly Dictionary _jsonPropertyNames = []; + private readonly VectorStoreRecordPropertyReader _propertyReader; /// /// Initializes a new instance of the class. /// - /// The record definition that defines the schema of the record type. + /// A helper to access property information for the current data model and record definition. public PineconeVectorStoreRecordMapper( - VectorStoreRecordDefinition vectorStoreRecordDefinition) + VectorStoreRecordPropertyReader propertyReader) { // Validate property types. - var propertiesInfo = VectorStoreRecordPropertyReader.FindProperties(typeof(TRecord), vectorStoreRecordDefinition, supportsMultipleVectors: false); - VectorStoreRecordPropertyReader.VerifyPropertyTypes([propertiesInfo.KeyProperty], s_supportedKeyTypes, "Key"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(propertiesInfo.DataProperties, s_supportedDataTypes, s_supportedEnumerableDataElementTypes, "Data"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(propertiesInfo.VectorProperties, s_supportedVectorTypes, "Vector"); + propertyReader.VerifyHasParameterlessConstructor(); + propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + propertyReader.VerifyDataProperties(s_supportedDataTypes, s_supportedEnumerableDataElementTypes); + propertyReader.VerifyVectorProperties(s_supportedVectorTypes); // Assign. - this._keyPropertyInfo = propertiesInfo.KeyProperty; - this._dataPropertiesInfo = propertiesInfo.DataProperties; - this._vectorPropertyInfo = propertiesInfo.VectorProperties[0]; - - // Get storage names and store for later use. 
- var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify(typeof(TRecord).Name, vectorStoreRecordDefinition, supportsMultipleVectors: false, requiresAtLeastOneVector: true); - this._jsonPropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(properties, typeof(TRecord), JsonSerializerOptions.Default); - this._storagePropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToStorageNameMap(properties); + this._propertyReader = propertyReader; } /// public Vector MapFromDataToStorageModel(TRecord dataModel) { - var keyObject = this._keyPropertyInfo.GetValue(dataModel); + var keyObject = this._propertyReader.KeyPropertyInfo.GetValue(dataModel); if (keyObject is null) { - throw new VectorStoreRecordMappingException($"Key property {this._keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName} may not be null."); + throw new VectorStoreRecordMappingException($"Key property {this._propertyReader.KeyPropertyName} on provided record of type {typeof(TRecord).FullName} may not be null."); } var metadata = new MetadataMap(); - foreach (var dataPropertyInfo in this._dataPropertiesInfo) + foreach (var dataPropertyInfo in this._propertyReader.DataPropertiesInfo) { - var propertyName = this._storagePropertyNames[dataPropertyInfo.Name]; + var propertyName = this._propertyReader.GetStoragePropertyName(dataPropertyInfo.Name); var propertyValue = dataPropertyInfo.GetValue(dataModel); if (propertyValue != null) { @@ -106,10 +88,10 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) } } - var valuesObject = this._vectorPropertyInfo.GetValue(dataModel); + var valuesObject = this._propertyReader.FirstVectorPropertyInfo!.GetValue(dataModel); if (valuesObject is not ReadOnlyMemory values) { - throw new VectorStoreRecordMappingException($"Vector property {this._vectorPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName} may not be null."); + throw new 
VectorStoreRecordMappingException($"Vector property {this._propertyReader.FirstVectorPropertyName} on provided record of type {typeof(TRecord).FullName} may not be null."); } // TODO: what about sparse values? @@ -127,52 +109,70 @@ public Vector MapFromDataToStorageModel(TRecord dataModel) /// public TRecord MapFromStorageToDataModel(Vector storageModel, StorageToDataModelMapperOptions options) { - var keyJsonName = this._jsonPropertyNames[this._keyPropertyInfo.Name]; - var outputJsonObject = new JsonObject - { - { keyJsonName, JsonValue.Create(storageModel.Id) }, - }; + // Construct the output record. + var outputRecord = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); + + // Set Key. + this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, storageModel.Id); + // Set Vector. if (options?.IncludeVectors is true) { - var propertyName = this._storagePropertyNames[this._vectorPropertyInfo.Name]; - var jsonName = this._jsonPropertyNames[this._vectorPropertyInfo.Name]; - outputJsonObject.Add(jsonName, new JsonArray(storageModel.Values.Select(x => JsonValue.Create(x)).ToArray())); + this._propertyReader.FirstVectorPropertyInfo!.SetValue( + outputRecord, + new ReadOnlyMemory(storageModel.Values)); } + // Set Data. if (storageModel.Metadata != null) { - foreach (var dataProperty in this._dataPropertiesInfo) - { - var propertyName = this._storagePropertyNames[dataProperty.Name]; - var jsonName = this._jsonPropertyNames[dataProperty.Name]; - - if (storageModel.Metadata.TryGetValue(propertyName, out var value)) - { - outputJsonObject.Add(jsonName, ConvertFromMetadataValueToJsonNode(value)); - } - } + VectorStoreRecordMapping.SetValuesOnProperties( + outputRecord, + this._propertyReader.DataPropertiesInfo, + this._propertyReader.StoragePropertyNamesMap, + storageModel.Metadata, + ConvertFromMetadataValueToNativeType); } - return outputJsonObject.Deserialize()!; + return outputRecord; } - private static JsonNode? 
ConvertFromMetadataValueToJsonNode(MetadataValue metadataValue) + private static object? ConvertFromMetadataValueToNativeType(MetadataValue metadataValue, Type targetType) => metadataValue.Inner switch { null => null, - bool boolValue => JsonValue.Create(boolValue), - string stringValue => JsonValue.Create(stringValue), - int intValue => JsonValue.Create(intValue), - long longValue => JsonValue.Create(longValue), - float floatValue => JsonValue.Create(floatValue), - double doubleValue => JsonValue.Create(doubleValue), - decimal decimalValue => JsonValue.Create(decimalValue), - MetadataValue[] array => new JsonArray(array.Select(ConvertFromMetadataValueToJsonNode).ToArray()), - List list => new JsonArray(list.Select(ConvertFromMetadataValueToJsonNode).ToArray()), + bool boolValue => boolValue, + string stringValue => stringValue, + // Numeric values are not always coming from the SDK in the desired type + // that the data model requires, so we need to convert them. + int intValue => ConvertToNumericValue(intValue, targetType), + long longValue => ConvertToNumericValue(longValue, targetType), + float floatValue => ConvertToNumericValue(floatValue, targetType), + double doubleValue => ConvertToNumericValue(doubleValue, targetType), + decimal decimalValue => ConvertToNumericValue(decimalValue, targetType), + MetadataValue[] array => VectorStoreRecordMapping.CreateEnumerable(array.Select(x => ConvertFromMetadataValueToNativeType(x, VectorStoreRecordPropertyVerification.GetCollectionElementType(targetType))), targetType), + List list => VectorStoreRecordMapping.CreateEnumerable(list.Select(x => ConvertFromMetadataValueToNativeType(x, VectorStoreRecordPropertyVerification.GetCollectionElementType(targetType))), targetType), _ => throw new VectorStoreRecordMappingException($"Unsupported metadata type: '{metadataValue.Inner?.GetType().FullName}'."), }; + private static object? ConvertToNumericValue(object? 
number, Type targetType) + { + if (number is null) + { + return null; + } + + return targetType switch + { + Type intType when intType == typeof(int) || intType == typeof(int?) => Convert.ToInt32(number), + Type longType when longType == typeof(long) || longType == typeof(long?) => Convert.ToInt64(number), + Type floatType when floatType == typeof(float) || floatType == typeof(float?) => Convert.ToSingle(number), + Type doubleType when doubleType == typeof(double) || doubleType == typeof(double?) => Convert.ToDouble(number), + Type decimalType when decimalType == typeof(decimal) || decimalType == typeof(decimal?) => Convert.ToDecimal(number), + _ => throw new VectorStoreRecordMappingException($"Unsupported target numeric type '{targetType.FullName}'."), + }; + } + // TODO: take advantage of MetadataValue.TryCreate once we upgrade the version of Pinecone.NET private static MetadataValue ConvertToMetadataValue(object? sourceValue) => sourceValue switch diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs index 4cec00ee35a6..5d3ad258a11e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Http/ApiSchema/SearchVectorsResponse.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -26,7 +25,6 @@ internal sealed class ScoredPoint public Dictionary Payload { get; set; } [JsonPropertyName("vector")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Vector { get; } [JsonConstructor] diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs new file mode 100644 
index 000000000000..75017c912ce2 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantGenericDataModelMapper.cs @@ -0,0 +1,217 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; +using Qdrant.Client.Grpc; + +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Qdrant. +/// +internal class QdrantGenericDataModelMapper : IVectorStoreRecordMapper, PointStruct>, IVectorStoreRecordMapper, PointStruct> +{ + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; + + /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. + private readonly bool _hasNamedVectors; + + /// + /// Initializes a new instance of the class. + /// + /// A helper to access property information for the current data model and record definition. + /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. + public QdrantGenericDataModelMapper( + VectorStoreRecordPropertyReader propertyReader, + bool hasNamedVectors) + { + Verify.NotNull(propertyReader); + + // Validate property types. + propertyReader.VerifyDataProperties(QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, supportEnumerable: true); + propertyReader.VerifyVectorProperties(QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes); + + // Assign. + this._propertyReader = propertyReader; + this._hasNamedVectors = hasNamedVectors; + } + + /// + public PointStruct MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + // Create point. 
+ var pointStruct = new PointStruct + { + Id = new PointId { Num = dataModel.Key }, + Vectors = new Vectors(), + Payload = { }, + }; + + // Loop through all properties and map each from the data model to the storage model. + MapProperties( + this._propertyReader.Properties, + dataModel.Data, + dataModel.Vectors, + pointStruct, + this._hasNamedVectors); + + return pointStruct; + } + + /// + public VectorStoreGenericDataModel MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) + { + var dataModel = new VectorStoreGenericDataModel(storageModel.Id.Num); + MapProperties(this._propertyReader.Properties, storageModel, dataModel.Data, dataModel.Vectors, this._hasNamedVectors); + return dataModel; + } + + /// + PointStruct IVectorStoreRecordMapper, PointStruct>.MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + // Create point. + var pointStruct = new PointStruct + { + Id = new PointId { Uuid = dataModel.Key.ToString("D") }, + Vectors = new Vectors(), + Payload = { }, + }; + + // Loop through all properties and map each from the data model to the storage model. + MapProperties( + this._propertyReader.Properties, + dataModel.Data, + dataModel.Vectors, + pointStruct, + this._hasNamedVectors); + + return pointStruct; + } + + /// + VectorStoreGenericDataModel IVectorStoreRecordMapper, PointStruct>.MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) + { + var dataModel = new VectorStoreGenericDataModel(new Guid(storageModel.Id.Uuid)); + MapProperties(this._propertyReader.Properties, storageModel, dataModel.Data, dataModel.Vectors, this._hasNamedVectors); + return dataModel; + } + + /// + /// Map the payload and vector properties from the data model to the qdrant storage model. + /// + /// The list of properties to map. + /// The payload properties on the data model. + /// The vector properties on the data model. + /// The storage model to map to. 
+ /// A value indicating whether qdrant is using named vectors for this collection. + /// Thrown if a vector on the data model is not a supported type. + private static void MapProperties(IEnumerable properties, Dictionary dataProperties, Dictionary vectorProperties, PointStruct pointStruct, bool hasNamedVectors) + { + if (hasNamedVectors) + { + pointStruct.Vectors.Vectors_ = new NamedVectors(); + } + + foreach (var property in properties) + { + if (property is VectorStoreRecordDataProperty dataProperty) + { + var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; + + // Just skip this property if it's not in the data model. + if (!dataProperties.TryGetValue(dataProperty.DataModelPropertyName, out var propertyValue)) + { + continue; + } + + // Map. + pointStruct.Payload.Add(storagePropertyName, QdrantVectorStoreRecordFieldMapping.ConvertToGrpcFieldValue(propertyValue)); + } + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + + // Just skip this property if it's not in the data model. + if (!vectorProperties.TryGetValue(vectorProperty.DataModelPropertyName, out var vector)) + { + continue; + } + + // Validate. + if (vector is not ReadOnlyMemory floatROM) + { + throw new VectorStoreRecordMappingException($"Vector property '{vectorProperty.DataModelPropertyName}' on provided record of type {nameof(VectorStoreGenericDataModel)} must be of type ReadOnlyMemory and not null."); + } + + // Map. + if (hasNamedVectors) + { + pointStruct.Vectors.Vectors_.Vectors.Add(storagePropertyName, floatROM.ToArray()); + } + else + { + pointStruct.Vectors.Vector = floatROM.ToArray(); + } + } + } + } + + /// + /// Map the payload and vector properties from the qdrant storage model to the data model. + /// + /// The list of properties to map. + /// The storage model to map from. 
+ /// The payload properties on the data model. + /// The vector properties on the data model. + /// A value indicating whether qdrant is using named vectors for this collection. + public static void MapProperties(IEnumerable properties, PointStruct storageModel, Dictionary dataProperties, Dictionary vectorProperties, bool hasNamedVectors) + { + foreach (var property in properties) + { + if (property is VectorStoreRecordDataProperty dataProperty) + { + var storagePropertyName = dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName; + + // Just skip this property if it's not in the storage model. + if (!storageModel.Payload.TryGetValue(storagePropertyName, out var propertyValue)) + { + continue; + } + + if (propertyValue.HasNullValue) + { + // Shortcut any null handling here so we don't have to check for it for each case. + dataProperties[dataProperty.DataModelPropertyName] = null; + } + else + { + var convertedValue = QdrantVectorStoreRecordFieldMapping.ConvertFromGrpcFieldValueToNativeType(propertyValue, dataProperty.PropertyType); + dataProperties[dataProperty.DataModelPropertyName] = convertedValue; + } + } + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + Vector? vector; + if (hasNamedVectors) + { + var storagePropertyName = vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName; + + // Just skip this property if it's not in the storage model. 
+ if (!storageModel.Vectors.Vectors_.Vectors.TryGetValue(storagePropertyName, out vector)) + { + continue; + } + } + else + { + vector = storageModel.Vectors.Vector; + } + + vectorProperties[vectorProperty.DataModelPropertyName] = new ReadOnlyMemory(vector.Data.ToArray()); + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs index 0795b4a1ccf0..10f8641ba132 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorRecord.cs @@ -4,7 +4,6 @@ using System.Collections.Generic; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Qdrant; @@ -23,7 +22,6 @@ public class QdrantVectorRecord /// The embedding data. /// [JsonPropertyName("embedding")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Embedding { get; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs index ef9c9f1593f0..967060e56c33 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStore.cs @@ -65,9 +65,9 @@ public IVectorStoreRecordCollection GetCollection( return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection(this._qdrantClient.QdrantClient, name, vectorStoreRecordDefinition); } - var directlyCreatedStore = new QdrantVectorStoreRecordCollection(this._qdrantClient, name, new QdrantVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }); - var castCreatedStore = directlyCreatedStore as IVectorStoreRecordCollection; - return castCreatedStore!; + var recordCollection = new QdrantVectorStoreRecordCollection(this._qdrantClient, 
name, new QdrantVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }); + var castRecordCollection = recordCollection as IVectorStoreRecordCollection; + return castRecordCollection!; } /// diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs index e637ae2e06ab..209039194723 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreCollectionCreateMapping.cs @@ -75,7 +75,7 @@ public static VectorParams MapSingleVector(VectorStoreRecordVectorProperty vecto /// The mapping of property names to storage names. /// THe mapped . /// Thrown if the property is missing information or has unsupported options specified. - public static VectorParamsMap MapNamedVectors(IEnumerable vectorProperties, Dictionary storagePropertyNames) + public static VectorParamsMap MapNamedVectors(IEnumerable vectorProperties, IReadOnlyDictionary storagePropertyNames) { var vectorParamsMap = new VectorParamsMap(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs index c3ead1bdee2d..27790c731aed 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreOptions.cs @@ -14,7 +14,7 @@ public sealed class QdrantVectorStoreOptions public bool HasNamedVectors { get; set; } = false; /// - /// An optional factory to use for constructing instances, if custom options are required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// public IQdrantVectorStoreRecordCollectionFactory? 
VectorStoreCollectionFactory { get; init; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs index a49c530b2cdb..bf7504edc345 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordCollection.cs @@ -47,15 +47,12 @@ public sealed class QdrantVectorStoreRecordCollection : IVectorStoreRec /// Optional configuration options for this class. private readonly QdrantVectorStoreRecordCollectionOptions _options; - /// A definition of the current storage model. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// A mapper to use for converting between qdrant point and consumer models. private readonly IVectorStoreRecordMapper _mapper; - /// A dictionary that maps from a property name to the configured name that should be used when storing it. - private readonly Dictionary _storagePropertyNames = new(); - /// /// Initializes a new instance of the class. /// @@ -82,19 +79,25 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st // Verify. Verify.NotNull(qdrantClient); Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.PointStructCustomMapper is not null, s_supportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._qdrantClient = qdrantClient; this._collectionName = collectionName; this._options = options ?? 
new QdrantVectorStoreRecordCollectionOptions(); - this._vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = !this._options.HasNamedVectors, + SupportsMultipleKeys = false, + SupportsMultipleVectors = this._options.HasNamedVectors + }); // Validate property types. - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify(typeof(TRecord).Name, this._vectorStoreRecordDefinition, supportsMultipleVectors: this._options.HasNamedVectors, requiresAtLeastOneVector: !this._options.HasNamedVectors); - VectorStoreRecordPropertyReader.VerifyPropertyTypes([properties.KeyProperty], s_supportedKeyTypes, "Key"); - - // Build a map of property names to storage names. - this._storagePropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToStorageNameMap(properties); + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); // Assign Mapper. if (this._options.PointStructCustomMapper is not null) @@ -102,13 +105,19 @@ internal QdrantVectorStoreRecordCollection(MockableQdrantClient qdrantClient, st // Custom Mapper. this._mapper = this._options.PointStructCustomMapper; } + else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel) || typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + // Generic data model mapper. + this._mapper = (IVectorStoreRecordMapper)new QdrantGenericDataModelMapper( + this._propertyReader, + this._options.HasNamedVectors); + } else { // Default Mapper. 
this._mapper = new QdrantVectorStoreRecordMapper( - this._vectorStoreRecordDefinition, - this._options.HasNamedVectors, - this._storagePropertyNames); + this._propertyReader, + this._options.HasNamedVectors); } } @@ -129,7 +138,7 @@ public async Task CreateCollectionAsync(CancellationToken cancellationToken = de if (!this._options.HasNamedVectors) { // If we are not using named vectors, we can only have one vector property. We can assume we have exactly one, since this is already verified in the constructor. - var singleVectorProperty = this._vectorStoreRecordDefinition.Properties.OfType().First(); + var singleVectorProperty = this._propertyReader.VectorProperty; // Map the single vector property to the qdrant config. var vectorParams = QdrantVectorStoreCollectionCreateMapping.MapSingleVector(singleVectorProperty!); @@ -145,10 +154,10 @@ await this.RunOperationAsync( else { // Since we are using named vectors, iterate over all vector properties. - var vectorProperties = this._vectorStoreRecordDefinition.Properties.OfType(); + var vectorProperties = this._propertyReader.VectorProperties; // Map the named vectors to the qdrant config. - var vectorParamsMap = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties, this._storagePropertyNames); + var vectorParamsMap = QdrantVectorStoreCollectionCreateMapping.MapNamedVectors(vectorProperties, this._propertyReader.StoragePropertyNamesMap); // Create the collection with named vectors. await this.RunOperationAsync( @@ -160,10 +169,10 @@ await this.RunOperationAsync( } // Add indexes for each of the data properties that require filtering. 
- var dataProperties = this._vectorStoreRecordDefinition.Properties.OfType().Where(x => x.IsFilterable); + var dataProperties = this._propertyReader.DataProperties.Where(x => x.IsFilterable); foreach (var dataProperty in dataProperties) { - var storageFieldName = this._storagePropertyNames[dataProperty.DataModelPropertyName]; + var storageFieldName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); var schemaType = QdrantVectorStoreCollectionCreateMapping.s_schemaTypeMap[dataProperty.PropertyType!]; await this.RunOperationAsync( @@ -176,7 +185,7 @@ await this.RunOperationAsync( } // Add indexes for each of the data properties that require full text search. - dataProperties = this._vectorStoreRecordDefinition.Properties.OfType().Where(x => x.IsFullTextSearchable); + dataProperties = this._propertyReader.DataProperties.Where(x => x.IsFullTextSearchable); foreach (var dataProperty in dataProperties) { if (dataProperty.PropertyType != typeof(string)) @@ -184,7 +193,7 @@ await this.RunOperationAsync( throw new InvalidOperationException($"Property {nameof(dataProperty.IsFullTextSearchable)} on {nameof(VectorStoreRecordDataProperty)} '{dataProperty.DataModelPropertyName}' is set to true, but the property type is not a string. 
The Qdrant VectorStore supports {nameof(dataProperty.IsFullTextSearchable)} on string properties only."); } - var storageFieldName = this._storagePropertyNames[dataProperty.DataModelPropertyName]; + var storageFieldName = this._propertyReader.GetStoragePropertyName(dataProperty.DataModelPropertyName); await this.RunOperationAsync( "CreatePayloadIndex", diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs new file mode 100644 index 000000000000..99836772784c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordFieldMapping.cs @@ -0,0 +1,132 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; +using Qdrant.Client.Grpc; + +namespace Microsoft.SemanticKernel.Connectors.Qdrant; + +/// +/// Contains helper methods for mapping fields to and from the format required by the Qdrant client sdk. +/// +internal static class QdrantVectorStoreRecordFieldMapping +{ + /// A set of types that data properties on the provided model may have. + public static readonly HashSet s_supportedDataTypes = + [ + typeof(string), + typeof(int), + typeof(long), + typeof(double), + typeof(float), + typeof(bool), + typeof(int?), + typeof(long?), + typeof(double?), + typeof(float?), + typeof(bool?) + ]; + + /// A set of types that vectors on the provided model may have. + /// + /// While qdrant supports float32 and uint64, the api only supports float64, therefore + /// any float32 vectors will be converted to float64 before being sent to qdrant. + /// + public static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; + + /// + /// Convert the given to the correct native type based on its properties. 
+ /// + /// The value to convert to a native type. + /// The target type to convert the value to. + /// The converted native value. + /// Thrown when an unsupported type is encountered. + public static object? ConvertFromGrpcFieldValueToNativeType(Value payloadValue, Type targetType) + { + return payloadValue.KindCase switch + { + Value.KindOneofCase.NullValue => null, + Value.KindOneofCase.IntegerValue => + targetType == typeof(int) || targetType == typeof(int?) ? + (object)(int)payloadValue.IntegerValue : + (object)payloadValue.IntegerValue, + Value.KindOneofCase.StringValue => payloadValue.StringValue, + Value.KindOneofCase.DoubleValue => + targetType == typeof(float) || targetType == typeof(float?) ? + (object)(float)payloadValue.DoubleValue : + (object)payloadValue.DoubleValue, + Value.KindOneofCase.BoolValue => payloadValue.BoolValue, + Value.KindOneofCase.ListValue => VectorStoreRecordMapping.CreateEnumerable( + payloadValue.ListValue.Values.Select( + x => ConvertFromGrpcFieldValueToNativeType(x, VectorStoreRecordPropertyVerification.GetCollectionElementType(targetType))), + targetType), + _ => throw new VectorStoreRecordMappingException($"Unsupported grpc value kind {payloadValue.KindCase}."), + }; + } + + /// + /// Convert the given to a object that can be stored in Qdrant. + /// + /// The object to convert. + /// The converted Qdrant value. + /// Thrown when an unsupported type is encountered. + public static Value ConvertToGrpcFieldValue(object? 
sourceValue) + { + var value = new Value(); + if (sourceValue is null) + { + value.NullValue = NullValue.NullValue; + } + else if (sourceValue is int intValue) + { + value.IntegerValue = intValue; + } + else if (sourceValue is long longValue) + { + value.IntegerValue = longValue; + } + else if (sourceValue is string stringValue) + { + value.StringValue = stringValue; + } + else if (sourceValue is float floatValue) + { + value.DoubleValue = floatValue; + } + else if (sourceValue is double doubleValue) + { + value.DoubleValue = doubleValue; + } + else if (sourceValue is bool boolValue) + { + value.BoolValue = boolValue; + } + else if (sourceValue is IEnumerable || + sourceValue is IEnumerable || + sourceValue is IEnumerable || + sourceValue is IEnumerable || + sourceValue is IEnumerable || + sourceValue is IEnumerable) + { + var listValue = sourceValue as IEnumerable; + value.ListValue = new ListValue(); + foreach (var item in listValue!) + { + value.ListValue.Values.Add(ConvertToGrpcFieldValue(item)); + } + } + else + { + throw new VectorStoreRecordMappingException($"Unsupported source value type {sourceValue?.GetType().FullName}."); + } + + return value; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs index 2c4238982391..4bdf4ac721b3 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/QdrantVectorStoreRecordMapper.cs @@ -1,12 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; -using System.Collections; -using System.Collections.Generic; using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Nodes; using Microsoft.SemanticKernel.Data; using Qdrant.Client.Grpc; @@ -19,49 +14,8 @@ namespace Microsoft.SemanticKernel.Connectors.Qdrant; internal sealed class QdrantVectorStoreRecordMapper : IVectorStoreRecordMapper where TRecord : class { - /// A set of types that data properties on the provided model may have. - private static readonly HashSet s_supportedDataTypes = - [ - typeof(string), - typeof(int), - typeof(long), - typeof(double), - typeof(float), - typeof(bool), - typeof(int?), - typeof(long?), - typeof(double?), - typeof(float?), - typeof(bool?) - ]; - - /// A set of types that vectors on the provided model may have. - /// - /// While qdrant supports float32 and uint64, the api only supports float64, therefore - /// any float32 vectors will be converted to float64 before being sent to qdrant. - /// - private static readonly HashSet s_supportedVectorTypes = - [ - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?), - typeof(ReadOnlyMemory), - typeof(ReadOnlyMemory?) - ]; - - /// A property info object that points at the key property for the current model, allowing easy reading and writing of this property. - private readonly PropertyInfo _keyPropertyInfo; - - /// A list of property info objects that point at the data properties in the current model, and allows easy reading and writing of these properties. - private readonly List _dataPropertiesInfo; - - /// A list of property info objects that point at the vector properties in the current model, and allows easy reading and writing of these properties. - private readonly List _vectorPropertiesInfo; - - /// A dictionary that maps from a property name to the configured name that should be used when storing it. 
- private readonly Dictionary _storagePropertyNames; - - /// A dictionary that maps from a property name to the configured name that should be used when serializing it to json. - private readonly Dictionary _jsonPropertyNames = new(); + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. private readonly bool _hasNamedVectors; @@ -69,50 +23,42 @@ internal sealed class QdrantVectorStoreRecordMapper : IVectorStoreRecor /// /// Initializes a new instance of the class. /// - /// The record definition that defines the schema of the record type. + /// A helper to access property information for the current data model and record definition. /// A value indicating whether the vectors in the store are named, or whether there is just a single unnamed vector per qdrant point. - /// A dictionary that maps from a property name to the configured name that should be used when storing it. public QdrantVectorStoreRecordMapper( - VectorStoreRecordDefinition vectorStoreRecordDefinition, - bool hasNamedVectors, - Dictionary storagePropertyNames) + VectorStoreRecordPropertyReader propertyReader, + bool hasNamedVectors) { - Verify.NotNull(vectorStoreRecordDefinition); - Verify.NotNull(storagePropertyNames); + Verify.NotNull(propertyReader); // Validate property types. 
- var propertiesInfo = VectorStoreRecordPropertyReader.FindProperties(typeof(TRecord), vectorStoreRecordDefinition, supportsMultipleVectors: hasNamedVectors); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(propertiesInfo.DataProperties, s_supportedDataTypes, "Data", supportEnumerable: true); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(propertiesInfo.VectorProperties, s_supportedVectorTypes, "Vector"); + propertyReader.VerifyHasParameterlessConstructor(); + propertyReader.VerifyDataProperties(QdrantVectorStoreRecordFieldMapping.s_supportedDataTypes, supportEnumerable: true); + propertyReader.VerifyVectorProperties(QdrantVectorStoreRecordFieldMapping.s_supportedVectorTypes); // Assign. + this._propertyReader = propertyReader; this._hasNamedVectors = hasNamedVectors; - this._keyPropertyInfo = propertiesInfo.KeyProperty; - this._dataPropertiesInfo = propertiesInfo.DataProperties; - this._vectorPropertiesInfo = propertiesInfo.VectorProperties; - this._storagePropertyNames = storagePropertyNames; - - // Get json storage names and store for later use. - this._jsonPropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(propertiesInfo, typeof(TRecord), JsonSerializerOptions.Default); } /// public PointStruct MapFromDataToStorageModel(TRecord dataModel) { PointId pointId; - if (this._keyPropertyInfo.PropertyType == typeof(ulong)) + var keyPropertyInfo = this._propertyReader.KeyPropertyInfo; + if (keyPropertyInfo.PropertyType == typeof(ulong)) { - var key = this._keyPropertyInfo.GetValue(dataModel) as ulong? ?? throw new VectorStoreRecordMappingException($"Missing key property {this._keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); + var key = keyPropertyInfo.GetValue(dataModel) as ulong? ?? 
throw new VectorStoreRecordMappingException($"Missing key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); pointId = new PointId { Num = key }; } - else if (this._keyPropertyInfo.PropertyType == typeof(Guid)) + else if (keyPropertyInfo.PropertyType == typeof(Guid)) { - var key = this._keyPropertyInfo.GetValue(dataModel) as Guid? ?? throw new VectorStoreRecordMappingException($"Missing key property {this._keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); + var key = keyPropertyInfo.GetValue(dataModel) as Guid? ?? throw new VectorStoreRecordMappingException($"Missing key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); pointId = new PointId { Uuid = key.ToString("D") }; } else { - throw new VectorStoreRecordMappingException($"Unsupported key type {this._keyPropertyInfo.PropertyType.FullName} for key property {this._keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); + throw new VectorStoreRecordMappingException($"Unsupported key type {keyPropertyInfo.PropertyType.FullName} for key property {keyPropertyInfo.Name} on provided record of type {typeof(TRecord).FullName}."); } // Create point. @@ -124,20 +70,20 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) }; // Add point payload. - foreach (var dataPropertyInfo in this._dataPropertiesInfo) + foreach (var dataPropertyInfo in this._propertyReader.DataPropertiesInfo) { - var propertyName = this._storagePropertyNames[dataPropertyInfo.Name]; + var propertyName = this._propertyReader.GetStoragePropertyName(dataPropertyInfo.Name); var propertyValue = dataPropertyInfo.GetValue(dataModel); - pointStruct.Payload.Add(propertyName, ConvertToGrpcFieldValue(propertyValue)); + pointStruct.Payload.Add(propertyName, QdrantVectorStoreRecordFieldMapping.ConvertToGrpcFieldValue(propertyValue)); } // Add vectors. 
if (this._hasNamedVectors) { var namedVectors = new NamedVectors(); - foreach (var vectorPropertyInfo in this._vectorPropertiesInfo) + foreach (var vectorPropertyInfo in this._propertyReader.VectorPropertiesInfo) { - var propertyName = this._storagePropertyNames[vectorPropertyInfo.Name]; + var propertyName = this._propertyReader.GetStoragePropertyName(vectorPropertyInfo.Name); var propertyValue = vectorPropertyInfo.GetValue(dataModel); if (propertyValue is not null) { @@ -151,7 +97,7 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) else { // We already verified in the constructor via FindProperties that there is exactly one vector property when not using named vectors. - var vectorPropertyInfo = this._vectorPropertiesInfo.First(); + var vectorPropertyInfo = this._propertyReader.FirstVectorPropertyInfo!; if (vectorPropertyInfo.GetValue(dataModel) is ReadOnlyMemory floatROM) { pointStruct.Vectors.Vector = floatROM.ToArray(); @@ -169,130 +115,42 @@ public PointStruct MapFromDataToStorageModel(TRecord dataModel) public TRecord MapFromStorageToDataModel(PointStruct storageModel, StorageToDataModelMapperOptions options) { // Get the key property name and value. - var keyJsonName = this._jsonPropertyNames[this._keyPropertyInfo.Name]; - var keyPropertyValue = storageModel.Id.HasNum ? storageModel.Id.Num as object : storageModel.Id.Uuid as object; + var keyPropertyValue = storageModel.Id.HasNum ? storageModel.Id.Num as object : new Guid(storageModel.Id.Uuid) as object; - // Create a json object to represent the point. - var outputJsonObject = new JsonObject - { - { keyJsonName, JsonValue.Create(keyPropertyValue) }, - }; + // Construct the output record. + var outputRecord = (TRecord)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); - // Add each vector property if embeddings are included in the point. + // Set Key. 
+ this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, keyPropertyValue); + + // Set each vector property if embeddings are included in the point. if (options?.IncludeVectors is true) { - foreach (var vectorProperty in this._vectorPropertiesInfo) + if (this._hasNamedVectors) { - var propertyName = this._storagePropertyNames[vectorProperty.Name]; - var jsonName = this._jsonPropertyNames[vectorProperty.Name]; - - if (this._hasNamedVectors) - { - if (storageModel.Vectors.Vectors_.Vectors.TryGetValue(propertyName, out var vector)) - { - outputJsonObject.Add(jsonName, new JsonArray(vector.Data.Select(x => JsonValue.Create(x)).ToArray())); - } - } - else - { - outputJsonObject.Add(jsonName, new JsonArray(storageModel.Vectors.Vector.Data.Select(x => JsonValue.Create(x)).ToArray())); - } + VectorStoreRecordMapping.SetValuesOnProperties( + outputRecord, + this._propertyReader.VectorPropertiesInfo, + this._propertyReader.StoragePropertyNamesMap, + storageModel.Vectors.Vectors_.Vectors, + (Vector vector, Type targetType) => new ReadOnlyMemory(vector.Data.ToArray())); } - } - - // Add each data property. - foreach (var dataProperty in this._dataPropertiesInfo) - { - var propertyName = this._storagePropertyNames[dataProperty.Name]; - var jsonName = this._jsonPropertyNames[dataProperty.Name]; - - if (storageModel.Payload.TryGetValue(propertyName, out var value)) + else { - outputJsonObject.Add(jsonName, ConvertFromGrpcFieldValueToJsonNode(value)); + this._propertyReader.FirstVectorPropertyInfo!.SetValue( + outputRecord, + new ReadOnlyMemory(storageModel.Vectors.Vector.Data.ToArray())); } } - // Convert from json object to the target data model. - return JsonSerializer.Deserialize(outputJsonObject)!; - } - - /// - /// Convert the given to the correct native type based on its properties. - /// - /// The value to convert to a native type. - /// The converted native value. - /// Thrown when an unsupported type is encountered. - private static JsonNode? 
ConvertFromGrpcFieldValueToJsonNode(Value payloadValue) - { - return payloadValue.KindCase switch - { - Value.KindOneofCase.NullValue => null, - Value.KindOneofCase.IntegerValue => JsonValue.Create(payloadValue.IntegerValue), - Value.KindOneofCase.StringValue => JsonValue.Create(payloadValue.StringValue), - Value.KindOneofCase.DoubleValue => JsonValue.Create(payloadValue.DoubleValue), - Value.KindOneofCase.BoolValue => JsonValue.Create(payloadValue.BoolValue), - Value.KindOneofCase.ListValue => new JsonArray(payloadValue.ListValue.Values.Select(x => ConvertFromGrpcFieldValueToJsonNode(x)).ToArray()), - Value.KindOneofCase.StructValue => new JsonObject(payloadValue.StructValue.Fields.ToDictionary(x => x.Key, x => ConvertFromGrpcFieldValueToJsonNode(x.Value))), - _ => throw new VectorStoreRecordMappingException($"Unsupported grpc value kind {payloadValue.KindCase}."), - }; - } - - /// - /// Convert the given to a object that can be stored in Qdrant. - /// - /// The object to convert. - /// The converted Qdrant value. - /// Thrown when an unsupported type is encountered. - private static Value ConvertToGrpcFieldValue(object? 
sourceValue) - { - var value = new Value(); - if (sourceValue is null) - { - value.NullValue = NullValue.NullValue; - } - else if (sourceValue is int intValue) - { - value.IntegerValue = intValue; - } - else if (sourceValue is long longValue) - { - value.IntegerValue = longValue; - } - else if (sourceValue is string stringValue) - { - value.StringValue = stringValue; - } - else if (sourceValue is float floatValue) - { - value.DoubleValue = floatValue; - } - else if (sourceValue is double doubleValue) - { - value.DoubleValue = doubleValue; - } - else if (sourceValue is bool boolValue) - { - value.BoolValue = boolValue; - } - else if (sourceValue is IEnumerable || - sourceValue is IEnumerable || - sourceValue is IEnumerable || - sourceValue is IEnumerable || - sourceValue is IEnumerable || - sourceValue is IEnumerable) - { - var listValue = sourceValue as IEnumerable; - value.ListValue = new ListValue(); - foreach (var item in listValue!) - { - value.ListValue.Values.Add(ConvertToGrpcFieldValue(item)); - } - } - else - { - throw new VectorStoreRecordMappingException($"Unsupported source value type {sourceValue?.GetType().FullName}."); - } + // Set each data property. + VectorStoreRecordMapping.SetValuesOnProperties( + outputRecord, + this._propertyReader.DataPropertiesInfo, + this._propertyReader.StoragePropertyNamesMap, + storageModel.Payload, + QdrantVectorStoreRecordFieldMapping.ConvertFromGrpcFieldValueToNativeType); - return value; + return outputRecord; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs new file mode 100644 index 000000000000..334cce3b4b2f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetGenericDataModelMapper.cs @@ -0,0 +1,133 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.InteropServices; +using Microsoft.SemanticKernel.Data; +using StackExchange.Redis; + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using hash sets. +/// +internal class RedisHashSetGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, HashEntry[] HashEntries)> +{ + /// All the properties from the record definition. + private readonly IReadOnlyList _properties; + + /// + /// Initializes a new instance of the class. + /// + /// All the properties from the record definition. + public RedisHashSetGenericDataModelMapper(IReadOnlyList properties) + { + Verify.NotNull(properties); + this._properties = properties; + } + + /// + public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + var hashEntries = new List(); + + foreach (var property in this._properties) + { + var storagePropertyName = property.StoragePropertyName ?? property.DataModelPropertyName; + var sourceDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + + // Only map properties across that actually exist in the input. + if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.DataModelPropertyName, out var sourceValue)) + { + continue; + } + + // Replicate null if the property exists but is null. 
+ if (sourceValue is null) + { + hashEntries.Add(new HashEntry(storagePropertyName, RedisValue.Null)); + continue; + } + + // Map data Properties + if (property is VectorStoreRecordDataProperty dataProperty) + { + hashEntries.Add(new HashEntry(storagePropertyName, RedisValue.Unbox(sourceValue))); + } + // Map vector properties + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + if (sourceValue is ReadOnlyMemory rom) + { + hashEntries.Add(new HashEntry(storagePropertyName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rom))); + } + else if (sourceValue is ReadOnlyMemory rod) + { + hashEntries.Add(new HashEntry(storagePropertyName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); + } + else + { + throw new VectorStoreRecordMappingException($"Unsupported vector type {sourceValue.GetType().Name} found on property ${vectorProperty.DataModelPropertyName}. Only float and double vectors are supported."); + } + } + } + + return (dataModel.Key, hashEntries.ToArray()); + } + + /// + public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key, HashEntry[] HashEntries) storageModel, StorageToDataModelMapperOptions options) + { + var dataModel = new VectorStoreGenericDataModel(storageModel.Key); + + foreach (var property in this._properties) + { + var storagePropertyName = property.StoragePropertyName ?? property.DataModelPropertyName; + var targetDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + var hashEntry = storageModel.HashEntries.FirstOrDefault(x => x.Name == storagePropertyName); + + // Only map properties across that actually exist in the input. + if (!hashEntry.Name.HasValue) + { + continue; + } + + // Replicate null if the property exists but is null. 
+ if (hashEntry.Value.IsNull) + { + targetDictionary.Add(property.DataModelPropertyName, null); + continue; + } + + // Map data Properties + if (property is VectorStoreRecordDataProperty dataProperty) + { + var typeOrNullableType = Nullable.GetUnderlyingType(property.PropertyType) ?? property.PropertyType; + var convertedValue = Convert.ChangeType(hashEntry.Value, typeOrNullableType); + dataModel.Data.Add(dataProperty.DataModelPropertyName, convertedValue); + } + + // Map vector properties + else if (property is VectorStoreRecordVectorProperty vectorProperty) + { + if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) + { + var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); + dataModel.Vectors.Add(vectorProperty.DataModelPropertyName, new ReadOnlyMemory(array)); + } + else if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) + { + var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); + dataModel.Vectors.Add(vectorProperty.DataModelPropertyName, new ReadOnlyMemory(array)); + } + else + { + throw new VectorStoreRecordMappingException($"Unsupported vector type '{property.PropertyType.Name}' found on property '{property.DataModelPropertyName}'. Only float and double vectors are supported."); + } + } + } + + return dataModel; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs index e68edb98870e..a8c0831c6271 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollection.cs @@ -70,15 +70,12 @@ public sealed class RedisHashSetVectorStoreRecordCollection : IVectorSt /// Optional configuration options for this class. 
private readonly RedisHashSetVectorStoreRecordCollectionOptions _options; - /// A definition of the current storage model. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// An array of the names of all the data properties that are part of the Redis payload, i.e. all properties except the key and vector properties. private readonly RedisValue[] _dataStoragePropertyNames; - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. - private readonly Dictionary _storagePropertyNames = new(); - /// The mapper to use when mapping between the consumer data model and the Redis record. private readonly IVectorStoreRecordMapper _mapper; @@ -94,35 +91,49 @@ public RedisHashSetVectorStoreRecordCollection(IDatabase database, string collec // Verify. Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.HashEntriesCustomMapper is not null, s_supportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._database = database; this._collectionName = collectionName; this._options = options ?? new RedisHashSetVectorStoreRecordCollectionOptions(); - this._vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? 
VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true + }); // Validate property types. - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify(typeof(TRecord).Name, this._vectorStoreRecordDefinition, supportsMultipleVectors: true, requiresAtLeastOneVector: false); - VectorStoreRecordPropertyReader.VerifyPropertyTypes([properties.KeyProperty], s_supportedKeyTypes, "Key"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.DataProperties, s_supportedDataTypes, "Data"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.VectorProperties, s_supportedVectorTypes, "Vector"); + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: false); + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); // Lookup storage property names. - this._storagePropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToStorageNameMap(properties); - this._dataStoragePropertyNames = properties - .DataProperties - .Select(x => this._storagePropertyNames[x.DataModelPropertyName]) + this._dataStoragePropertyNames = this._propertyReader + .DataPropertyStoragePropertyNames .Select(RedisValue.Unbox) .ToArray(); // Assign Mapper. if (this._options.HashEntriesCustomMapper is not null) { + // Custom Mapper. this._mapper = this._options.HashEntriesCustomMapper; } + else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + // Generic data model mapper. 
+ this._mapper = (IVectorStoreRecordMapper)new RedisHashSetGenericDataModelMapper(this._propertyReader.Properties); + } else { - this._mapper = new RedisHashSetVectorStoreRecordMapper(this._vectorStoreRecordDefinition, this._storagePropertyNames); + // Default Mapper. + this._mapper = new RedisHashSetVectorStoreRecordMapper(this._propertyReader); } } @@ -156,7 +167,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. - var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._vectorStoreRecordDefinition.Properties, this._storagePropertyNames); + var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.StoragePropertyNamesMap, useDollarPrefix: false); // Create the index creation params. // Add the collection name and colon as the index prefix, which means that any record where the key is prefixed with this text will be indexed by this index diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs index 7e17859ae0c9..71d5d9f7d338 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordCollectionOptions.cs @@ -13,13 +13,13 @@ public sealed class RedisHashSetVectorStoreRecordCollectionOptions { /// /// Gets or sets a value indicating whether the collection name should be prefixed to the - /// key names before reading or writing to the Redis store. Default is false. + /// key names before reading or writing to the Redis store. Default is true. 
/// /// /// For a record to be indexed by a specific Redis index, the key name must be prefixed with the matching prefix configured on the Redis index. /// You can either pass in keys that are already prefixed, or set this option to true to have the collection name prefixed to the key names automatically. /// - public bool PrefixCollectionNameToKeyNames { get; init; } = false; + public bool PrefixCollectionNameToKeyNames { get; init; } = true; /// /// Gets or sets an optional custom mapper to use when converting between the data model and the Redis record. diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs index ef31bf09f475..576dcb691f5a 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisHashSetVectorStoreRecordMapper.cs @@ -3,10 +3,7 @@ using System; using System.Collections.Generic; using System.Linq; -using System.Reflection; using System.Runtime.InteropServices; -using System.Text.Json; -using System.Text.Json.Nodes; using Microsoft.SemanticKernel.Data; using StackExchange.Redis; @@ -19,66 +16,38 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; internal sealed class RedisHashSetVectorStoreRecordMapper : IVectorStoreRecordMapper where TConsumerDataModel : class { - /// A property info object that points at the key property for the current model, allowing easy reading and writing of this property. - private readonly PropertyInfo _keyPropertyInfo; - - /// The name of the temporary json property that the key field will be serialized / parsed from. - private readonly string _keyFieldJsonPropertyName; - - /// A list of property info objects that point at the data properties in the current model, and allows easy reading and writing of these properties. 
- private readonly IEnumerable _dataPropertiesInfo; - - /// A list of property info objects that point at the vector properties in the current model, and allows easy reading and writing of these properties. - private readonly IEnumerable _vectorPropertiesInfo; - - /// A dictionary that maps from a property name to the configured name that should be used when storing it. - private readonly Dictionary _storagePropertyNames; - - /// A dictionary that maps from a property name to the configured name that should be used when serializing it to json for data and vector properties. - private readonly Dictionary _jsonPropertyNames = new(); + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// /// Initializes a new instance of the class. /// - /// The record definition that defines the schema of the record type. - /// A dictionary that maps from a property name to the configured name that should be used when storing it. + /// A helper to access property information for the current data model and record definition. 
public RedisHashSetVectorStoreRecordMapper( - VectorStoreRecordDefinition vectorStoreRecordDefinition, - Dictionary storagePropertyNames) + VectorStoreRecordPropertyReader propertyReader) { - Verify.NotNull(vectorStoreRecordDefinition); - Verify.NotNull(storagePropertyNames); - - (PropertyInfo keyPropertyInfo, List dataPropertiesInfo, List vectorPropertiesInfo) = VectorStoreRecordPropertyReader.FindProperties(typeof(TConsumerDataModel), vectorStoreRecordDefinition, supportsMultipleVectors: true); - - this._keyPropertyInfo = keyPropertyInfo; - this._dataPropertiesInfo = dataPropertiesInfo; - this._vectorPropertiesInfo = vectorPropertiesInfo; - this._storagePropertyNames = storagePropertyNames; - - this._keyFieldJsonPropertyName = VectorStoreRecordPropertyReader.GetJsonPropertyName(JsonSerializerOptions.Default, keyPropertyInfo); - foreach (var property in dataPropertiesInfo.Concat(vectorPropertiesInfo)) - { - this._jsonPropertyNames[property.Name] = VectorStoreRecordPropertyReader.GetJsonPropertyName(JsonSerializerOptions.Default, property); - } + Verify.NotNull(propertyReader); + propertyReader.VerifyHasParameterlessConstructor(); + this._propertyReader = propertyReader; } /// public (string Key, HashEntry[] HashEntries) MapFromDataToStorageModel(TConsumerDataModel dataModel) { - var keyValue = this._keyPropertyInfo.GetValue(dataModel) as string ?? throw new VectorStoreRecordMappingException($"Missing key property {this._keyPropertyInfo.Name} on provided record of type {typeof(TConsumerDataModel).FullName}."); + var keyValue = this._propertyReader.KeyPropertyInfo.GetValue(dataModel) as string ?? 
+ throw new VectorStoreRecordMappingException($"Missing key property {this._propertyReader.KeyPropertyName} on provided record of type {typeof(TConsumerDataModel).FullName}."); var hashEntries = new List(); - foreach (var property in this._dataPropertiesInfo) + foreach (var property in this._propertyReader.DataPropertiesInfo) { - var storageName = this._storagePropertyNames[property.Name]; + var storageName = this._propertyReader.GetStoragePropertyName(property.Name); var value = property.GetValue(dataModel); hashEntries.Add(new HashEntry(storageName, RedisValue.Unbox(value))); } - foreach (var property in this._vectorPropertiesInfo) + foreach (var property in this._propertyReader.VectorPropertiesInfo) { - var storageName = this._storagePropertyNames[property.Name]; + var storageName = this._propertyReader.GetStoragePropertyName(property.Name); var value = property.GetValue(dataModel); if (value is not null) { @@ -87,11 +56,11 @@ public RedisHashSetVectorStoreRecordMapper( // collection constructor to ensure that the model has no other vector types. 
if (value is ReadOnlyMemory rom) { - hashEntries.Add(new HashEntry(storageName, ConvertVectorToBytes(rom))); + hashEntries.Add(new HashEntry(storageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rom))); } else if (value is ReadOnlyMemory rod) { - hashEntries.Add(new HashEntry(storageName, ConvertVectorToBytes(rod))); + hashEntries.Add(new HashEntry(storageName, RedisVectorStoreRecordFieldMapping.ConvertVectorToBytes(rod))); } } } @@ -102,68 +71,53 @@ public RedisHashSetVectorStoreRecordMapper( /// public TConsumerDataModel MapFromStorageToDataModel((string Key, HashEntry[] HashEntries) storageModel, StorageToDataModelMapperOptions options) { - var jsonObject = new JsonObject(); + var hashEntriesDictionary = storageModel.HashEntries.ToDictionary(x => (string)x.Name!, x => x.Value); - foreach (var property in this._dataPropertiesInfo) - { - var storageName = this._storagePropertyNames[property.Name]; - var jsonName = this._jsonPropertyNames[property.Name]; - var hashEntry = storageModel.HashEntries.FirstOrDefault(x => x.Name == storageName); - if (hashEntry.Name.HasValue) - { - var typeOrNullableType = Nullable.GetUnderlyingType(property.PropertyType) ?? property.PropertyType; - var convertedValue = Convert.ChangeType(hashEntry.Value, typeOrNullableType); - jsonObject.Add(jsonName, JsonValue.Create(convertedValue)); - } - } + // Construct the output record. + var outputRecord = (TConsumerDataModel)this._propertyReader.ParameterLessConstructorInfo.Invoke(null); - if (options.IncludeVectors) - { - foreach (var property in this._vectorPropertiesInfo) - { - var storageName = this._storagePropertyNames[property.Name]; - var jsonName = this._jsonPropertyNames[property.Name]; + // Set Key. + this._propertyReader.KeyPropertyInfo.SetValue(outputRecord, storageModel.Key); - var hashEntry = storageModel.HashEntries.FirstOrDefault(x => x.Name == storageName); - if (hashEntry.Name.HasValue) + // Set each vector property if embeddings should be returned. 
+ if (options?.IncludeVectors is true) + { + VectorStoreRecordMapping.SetValuesOnProperties( + outputRecord, + this._propertyReader.VectorPropertiesInfo, + this._propertyReader.StoragePropertyNamesMap, + hashEntriesDictionary, + (RedisValue vector, Type targetType) => { - if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) + if (targetType == typeof(ReadOnlyMemory) || targetType == typeof(ReadOnlyMemory?)) { - var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); - jsonObject.Add(jsonName, JsonValue.Create(array)); + var array = MemoryMarshal.Cast((byte[])vector!).ToArray(); + return new ReadOnlyMemory(array); } - else if (property.PropertyType == typeof(ReadOnlyMemory) || property.PropertyType == typeof(ReadOnlyMemory?)) + else if (targetType == typeof(ReadOnlyMemory) || targetType == typeof(ReadOnlyMemory?)) { - var array = MemoryMarshal.Cast((byte[])hashEntry.Value!).ToArray(); - jsonObject.Add(jsonName, JsonValue.Create(array)); + var array = MemoryMarshal.Cast((byte[])vector!).ToArray(); + return new ReadOnlyMemory(array); } else { - throw new VectorStoreRecordMappingException($"Invalid vector type '{property.PropertyType.Name}' found on property '{property.Name}' on provided record of type '{typeof(TConsumerDataModel).FullName}'. Only float and double vectors are supported."); + throw new VectorStoreRecordMappingException($"Unsupported vector type '{targetType}'. Only float and double vectors are supported."); } - } - } - } - - // Check that the key field is not already present in the redis value. - if (jsonObject.ContainsKey(this._keyFieldJsonPropertyName)) - { - throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'. Key property '{this._keyFieldJsonPropertyName}' is already present on retrieved object."); + }); } - // Since the key is not stored in the redis value, add it back in before deserializing into the data model. 
- jsonObject.Add(this._keyFieldJsonPropertyName, storageModel.Key); - - return JsonSerializer.Deserialize(jsonObject)!; - } - - private static byte[] ConvertVectorToBytes(ReadOnlyMemory vector) - { - return MemoryMarshal.AsBytes(vector.Span).ToArray(); - } + // Set each data property. + VectorStoreRecordMapping.SetValuesOnProperties( + outputRecord, + this._propertyReader.DataPropertiesInfo, + this._propertyReader.StoragePropertyNamesMap, + hashEntriesDictionary, + (RedisValue hashValue, Type targetType) => + { + var typeOrNullableType = Nullable.GetUnderlyingType(targetType) ?? targetType; + return Convert.ChangeType(hashValue, typeOrNullableType); + }); - private static byte[] ConvertVectorToBytes(ReadOnlyMemory vector) - { - return MemoryMarshal.AsBytes(vector.Span).ToArray(); + return outputRecord; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs new file mode 100644 index 000000000000..080813877720 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonGenericDataModelMapper.cs @@ -0,0 +1,139 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Redis when using JSON. +/// +internal class RedisJsonGenericDataModelMapper : IVectorStoreRecordMapper, (string Key, JsonNode Node)> +{ + /// All the properties from the record definition. + private readonly IReadOnlyList _properties; + + /// The JSON serializer options to use when converting between the data model and the Redis record. 
+ private readonly JsonSerializerOptions _jsonSerializerOptions; + + /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. + public readonly Dictionary _storagePropertyNames; + + /// + /// Initializes a new instance of the class. + /// + /// All the properties from the record definition. + /// The JSON serializer options to use when converting between the data model and the Redis record. + public RedisJsonGenericDataModelMapper( + IReadOnlyList properties, + JsonSerializerOptions jsonSerializerOptions) + { + Verify.NotNull(properties); + Verify.NotNull(jsonSerializerOptions); + + this._properties = properties; + this._jsonSerializerOptions = jsonSerializerOptions; + + // Create a dictionary that maps from the data model property name to the storage property name. + this._storagePropertyNames = properties.Select(x => + { + if (x.StoragePropertyName is not null) + { + return new KeyValuePair( + x.DataModelPropertyName, + x.StoragePropertyName); + } + + if (jsonSerializerOptions.PropertyNamingPolicy is not null) + { + return new KeyValuePair( + x.DataModelPropertyName, + jsonSerializerOptions.PropertyNamingPolicy.ConvertName(x.DataModelPropertyName)); + } + + return new KeyValuePair( + x.DataModelPropertyName, + x.DataModelPropertyName); + }).ToDictionary(x => x.Key, x => x.Value); + } + + /// + public (string Key, JsonNode Node) MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + var jsonObject = new JsonObject(); + + foreach (var property in this._properties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + var sourceDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + + // Only map properties across that actually exist in the input. 
+ if (sourceDictionary is null || !sourceDictionary.TryGetValue(property.DataModelPropertyName, out var sourceValue)) + { + continue; + } + + // Replicate null if the property exists but is null. + if (sourceValue is null) + { + jsonObject.Add(storagePropertyName, null); + continue; + } + + jsonObject.Add(storagePropertyName, JsonSerializer.SerializeToNode(sourceValue, property.PropertyType)); + } + + return (dataModel.Key, jsonObject); + } + + /// + public VectorStoreGenericDataModel MapFromStorageToDataModel((string Key, JsonNode Node) storageModel, StorageToDataModelMapperOptions options) + { + var dataModel = new VectorStoreGenericDataModel(storageModel.Key); + + // The redis result can be either a single object or an array with a single object in the case where we are doing an MGET. + JsonObject jsonObject; + if (storageModel.Node is JsonObject topLevelJsonObject) + { + jsonObject = topLevelJsonObject; + } + else if (storageModel.Node is JsonArray jsonArray && jsonArray.Count == 1 && jsonArray[0] is JsonObject arrayEntryJsonObject) + { + jsonObject = arrayEntryJsonObject; + } + else + { + throw new VectorStoreRecordMappingException($"Invalid data format for document with key '{storageModel.Key}'"); + } + + foreach (var property in this._properties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + var targetDictionary = property is VectorStoreRecordDataProperty ? dataModel.Data : dataModel.Vectors; + + // Only map properties across that actually exist in the input. + if (!jsonObject.TryGetPropertyValue(storagePropertyName, out var sourceValue)) + { + continue; + } + + // Replicate null if the property exists but is null. + if (sourceValue is null) + { + targetDictionary.Add(property.DataModelPropertyName, null); + continue; + } + + // Map data and vector values. 
+ if (property is VectorStoreRecordDataProperty || property is VectorStoreRecordVectorProperty) + { + targetDictionary.Add(property.DataModelPropertyName, JsonSerializer.Deserialize(sourceValue, property.PropertyType)); + } + } + + return dataModel; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs index 44a6bc41d195..491a598cb9e2 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisJsonVectorStoreRecordCollection.cs @@ -53,15 +53,12 @@ public sealed class RedisJsonVectorStoreRecordCollection : IVectorStore /// Optional configuration options for this class. private readonly RedisJsonVectorStoreRecordCollectionOptions _options; - /// A definition of the current storage model. - private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; /// An array of the storage names of all the data properties that are part of the Redis payload, i.e. all properties except the key and vector properties. private readonly string[] _dataStoragePropertyNames; - /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. - private readonly Dictionary _storagePropertyNames = new(); - /// The mapper to use when mapping between the consumer data model and the Redis record. private readonly IVectorStoreRecordMapper _mapper; @@ -80,37 +77,49 @@ public RedisJsonVectorStoreRecordCollection(IDatabase database, string collectio // Verify. 
Verify.NotNull(database); Verify.NotNullOrWhiteSpace(collectionName); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(typeof(TRecord), options?.JsonNodeCustomMapper is not null, s_supportedKeyTypes); + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(typeof(TRecord), options?.VectorStoreRecordDefinition is not null); // Assign. this._database = database; this._collectionName = collectionName; this._options = options ?? new RedisJsonVectorStoreRecordCollectionOptions(); this._jsonSerializerOptions = this._options.JsonSerializerOptions ?? JsonSerializerOptions.Default; - this._vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + JsonSerializerOptions = this._jsonSerializerOptions + }); // Validate property types. - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify(typeof(TRecord).Name, this._vectorStoreRecordDefinition, supportsMultipleVectors: true, requiresAtLeastOneVector: false); - VectorStoreRecordPropertyReader.VerifyPropertyTypes([properties.KeyProperty], s_supportedKeyTypes, "Key"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.VectorProperties, s_supportedVectorTypes, "Vector"); - - // Lookup json storage property names. - var keyJsonPropertyName = VectorStoreRecordPropertyReader.GetJsonPropertyName(properties.KeyProperty, typeof(TRecord), this._jsonSerializerOptions); + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); // Lookup storage property names. 
- this._storagePropertyNames = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(properties, typeof(TRecord), this._jsonSerializerOptions); - this._dataStoragePropertyNames = properties - .DataProperties - .Select(x => this._storagePropertyNames[x.DataModelPropertyName]) - .ToArray(); + this._dataStoragePropertyNames = this._propertyReader.DataPropertyJsonNames.ToArray(); // Assign Mapper. if (this._options.JsonNodeCustomMapper is not null) { + // Custom Mapper. this._mapper = this._options.JsonNodeCustomMapper; } + else if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + // Generic data model mapper. + this._mapper = (IVectorStoreRecordMapper)new RedisJsonGenericDataModelMapper( + this._propertyReader.Properties, + this._jsonSerializerOptions); + } else { - this._mapper = new RedisJsonVectorStoreRecordMapper(keyJsonPropertyName, this._jsonSerializerOptions); + // Default Mapper. + this._mapper = new RedisJsonVectorStoreRecordMapper(this._propertyReader.KeyPropertyJsonName, this._jsonSerializerOptions); } } @@ -144,7 +153,7 @@ public async Task CollectionExistsAsync(CancellationToken cancellationToke public Task CreateCollectionAsync(CancellationToken cancellationToken = default) { // Map the record definition to a schema. - var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._vectorStoreRecordDefinition.Properties, this._storagePropertyNames); + var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(this._propertyReader.Properties, this._propertyReader.JsonPropertyNamesMap, useDollarPrefix: true); // Create the index creation params. 
// Add the collection name and colon as the index prefix, which means that any record where the key is prefixed with this text will be indexed by this index diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs index 51a933d36062..2f41fea7b160 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStore.cs @@ -57,13 +57,13 @@ public IVectorStoreRecordCollection GetCollection( if (this._options.StorageType == RedisStorageType.HashSet) { - var directlyCreatedStore = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; - return directlyCreatedStore!; + var recordCollection = new RedisHashSetVectorStoreRecordCollection(this._database, name, new RedisHashSetVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + return recordCollection!; } else { - var directlyCreatedStore = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; - return directlyCreatedStore!; + var recordCollection = new RedisJsonVectorStoreRecordCollection(this._database, name, new RedisJsonVectorStoreRecordCollectionOptions() { VectorStoreRecordDefinition = vectorStoreRecordDefinition }) as IVectorStoreRecordCollection; + return recordCollection!; } } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs index 2bdb6a67b5ef..5878a3ed6de6 100644 --- 
a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreCollectionCreateMapping.cs @@ -48,11 +48,13 @@ internal static class RedisVectorStoreCollectionCreateMapping /// /// The property definitions to map from. /// A dictionary that maps from a property name to the storage name that should be used when serializing it to json for data and vector properties. + /// A value indicating whether to include $. prefix for field names as required in JSON mode. /// The mapped Redis . /// Thrown if there are missing required or unsupported configuration options set. - public static Schema MapToSchema(IEnumerable properties, Dictionary storagePropertyNames) + public static Schema MapToSchema(IEnumerable properties, IReadOnlyDictionary storagePropertyNames, bool useDollarPrefix) { var schema = new Schema(); + var fieldNamePrefix = useDollarPrefix ? "$." : string.Empty; // Loop through all properties and create the index fields. 
foreach (var property in properties) @@ -79,7 +81,7 @@ public static Schema MapToSchema(IEnumerable properti { if (dataProperty.PropertyType == typeof(string) || (typeof(IEnumerable).IsAssignableFrom(dataProperty.PropertyType) && GetEnumerableType(dataProperty.PropertyType) == typeof(string))) { - schema.AddTextField(new FieldName($"$.{storageName}", storageName)); + schema.AddTextField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); } else { @@ -92,15 +94,15 @@ public static Schema MapToSchema(IEnumerable properti { if (dataProperty.PropertyType == typeof(string)) { - schema.AddTagField(new FieldName($"$.{storageName}", storageName)); + schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); } else if (typeof(IEnumerable).IsAssignableFrom(dataProperty.PropertyType) && GetEnumerableType(dataProperty.PropertyType) == typeof(string)) { - schema.AddTagField(new FieldName($"$.{storageName}.*", storageName)); + schema.AddTagField(new FieldName($"{fieldNamePrefix}{storageName}.*", storageName)); } else if (RedisVectorStoreCollectionCreateMapping.s_supportedFilterableNumericDataTypes.Contains(dataProperty.PropertyType)) { - schema.AddNumericField(new FieldName($"$.{storageName}", storageName)); + schema.AddNumericField(new FieldName($"{fieldNamePrefix}{storageName}", storageName)); } else { @@ -123,7 +125,7 @@ public static Schema MapToSchema(IEnumerable properti var indexKind = GetSDKIndexKind(vectorProperty); var distanceAlgorithm = GetSDKDistanceAlgorithm(vectorProperty); var dimensions = vectorProperty.Dimensions.Value.ToString(CultureInfo.InvariantCulture); - schema.AddVectorField(new FieldName($"$.{storageName}", storageName), indexKind, new Dictionary() + schema.AddVectorField(new FieldName($"{fieldNamePrefix}{storageName}", storageName), indexKind, new Dictionary() { ["TYPE"] = "FLOAT32", ["DIM"] = dimensions, diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs 
b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs index 0434b3c633ec..63eeda5a5e3e 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreOptions.cs @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Connectors.Redis; public sealed class RedisVectorStoreOptions { /// - /// An optional factory to use for constructing instances, if custom options are required. + /// An optional factory to use for constructing instances, if a custom record collection is required. /// public IRedisVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs new file mode 100644 index 000000000000..fd9d183330a4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Redis/RedisVectorStoreRecordFieldMapping.cs @@ -0,0 +1,32 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Runtime.InteropServices; + +namespace Microsoft.SemanticKernel.Connectors.Redis; + +/// +/// Contains helper methods for mapping fields to and from the format required by the Redis client sdk. +/// +internal static class RedisVectorStoreRecordFieldMapping +{ + /// + /// Convert a vector to a byte array as required by the Redis client sdk when using hashsets. + /// + /// The vector to convert. + /// The byte array. + public static byte[] ConvertVectorToBytes(ReadOnlyMemory vector) + { + return MemoryMarshal.AsBytes(vector.Span).ToArray(); + } + + /// + /// Convert a vector to a byte array as required by the Redis client sdk when using hashsets. + /// + /// The vector to convert. + /// The byte array. 
+ public static byte[] ConvertVectorToBytes(ReadOnlyMemory vector) + { + return MemoryMarshal.AsBytes(vector.Span).ToArray(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs index 581a21afc52a..cdd1171ec219 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs @@ -146,6 +146,41 @@ public async IAsyncEnumerable ReadAllAsync(SqliteConnection conn, return null; } + public async IAsyncEnumerable ReadBatchAsync(SqliteConnection conn, + string collectionName, + string[] keys, + bool withEmbeddings = false, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using SqliteCommand cmd = conn.CreateCommand(); + var keyParameters = keys.Select((key, index) => $"@key{index}"); + var parameters = string.Join(", ", keyParameters); + + var selectFieldQuery = withEmbeddings ? "*" : "key, metadata, timestamp"; +#pragma warning disable CA2100 // Review SQL queries for security vulnerabilities + cmd.CommandText = $@" + SELECT {selectFieldQuery} FROM {TableName} + WHERE collection=@collection + AND key IN ({parameters})"; +#pragma warning restore CA2100 // Review SQL queries for security vulnerabilities + + cmd.Parameters.Add(new SqliteParameter("@collection", collectionName)); + for (int i = 0; i < keys.Length; i++) + { + cmd.Parameters.Add(new SqliteParameter($"@key{i}", keys[i])); + } + + using var dataReader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false); + while (await dataReader.ReadAsync(cancellationToken).ConfigureAwait(false)) + { + string key = dataReader.GetString("key"); + string metadata = dataReader.GetString("metadata"); + string embedding = withEmbeddings ? 
dataReader.GetString("embedding") : string.Empty; + string timestamp = dataReader.GetString("timestamp"); + yield return new DatabaseEntry() { Key = key, MetadataString = metadata, EmbeddingString = embedding, Timestamp = timestamp }; + } + } + public Task DeleteCollectionAsync(SqliteConnection conn, string collectionName, CancellationToken cancellationToken = default) { using SqliteCommand cmd = conn.CreateCommand(); diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs index 232b0e97b9dc..59c8591c0bf6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs @@ -87,21 +87,9 @@ public async IAsyncEnumerable UpsertBatchAsync(string collectionName, IE } /// - public async IAsyncEnumerable GetBatchAsync(string collectionName, IEnumerable keys, bool withEmbeddings = false, - [EnumeratorCancellation] CancellationToken cancellationToken = default) + public IAsyncEnumerable GetBatchAsync(string collectionName, IEnumerable keys, bool withEmbeddings = false, CancellationToken cancellationToken = default) { - foreach (var key in keys) - { - var result = await this.InternalGetAsync(this._dbConnection, collectionName, key, withEmbeddings, cancellationToken).ConfigureAwait(false); - if (result is not null) - { - yield return result; - } - else - { - yield break; - } - } + return this.InternalGetBatchAsync(this._dbConnection, collectionName, keys.ToArray(), withEmbeddings, cancellationToken); } /// @@ -283,5 +271,18 @@ await this._dbConnector.UpsertAsync( ParseTimestamp(entry.Value.Timestamp)); } + private async IAsyncEnumerable InternalGetBatchAsync( + SqliteConnection connection, + string collectionName, + string[] keys, bool withEmbedding, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + await foreach (DatabaseEntry dbEntry in 
this._dbConnector.ReadBatchAsync(connection, collectionName, keys, withEmbedding, cancellationToken).ConfigureAwait(false)) + { + ReadOnlyMemory vector = withEmbedding ? JsonSerializer.Deserialize>(dbEntry.EmbeddingString, JsonOptionsCache.Default) : ReadOnlyMemory.Empty; + yield return MemoryRecord.FromJsonMetadata(dbEntry.MetadataString, vector, dbEntry.Key, ParseTimestamp(dbEntry.Timestamp)); ; + } + } + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj index 7f75b9c28864..06901c892c22 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Connectors.Memory.Weaviate.csproj @@ -26,4 +26,8 @@ + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateDateTimeOffsetConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateDateTimeOffsetConverter.cs new file mode 100644 index 000000000000..754b6b10cbdd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateDateTimeOffsetConverter.cs @@ -0,0 +1,33 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Converts datetime type to RFC 3339 formatted string. 
+/// +internal sealed class WeaviateDateTimeOffsetConverter : JsonConverter +{ + private const string DateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK"; + + public override DateTimeOffset Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var dateString = reader.GetString(); + + if (string.IsNullOrWhiteSpace(dateString)) + { + return default; + } + + return DateTimeOffset.Parse(dateString, CultureInfo.InvariantCulture); + } + + public override void Write(Utf8JsonWriter writer, DateTimeOffset value, JsonSerializerOptions options) + { + writer.WriteStringValue(value.ToString(DateTimeFormat, CultureInfo.InvariantCulture)); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateNullableDateTimeOffsetConverter.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateNullableDateTimeOffsetConverter.cs new file mode 100644 index 000000000000..8dde4702aac0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Converters/WeaviateNullableDateTimeOffsetConverter.cs @@ -0,0 +1,45 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Converts datetime type to RFC 3339 formatted string. +/// +internal sealed class WeaviateNullableDateTimeOffsetConverter : JsonConverter +{ + private const string DateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK"; + + public override DateTimeOffset? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.Null) + { + return null; + } + + var dateString = reader.GetString(); + + if (string.IsNullOrWhiteSpace(dateString)) + { + return null; + } + + return DateTimeOffset.Parse(dateString, CultureInfo.InvariantCulture); + } + + public override void Write(Utf8JsonWriter writer, DateTimeOffset? 
value, JsonSerializerOptions options) + { + if (value.HasValue) + { + writer.WriteStringValue(value.Value.ToString(DateTimeFormat, CultureInfo.InvariantCulture)); + } + else + { + writer.WriteNullValue(); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs index 255dcf91363d..5743f16108c6 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/Http/HttpRequest.cs @@ -4,7 +4,6 @@ using System.Text; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -14,7 +13,6 @@ internal static class HttpRequest { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Converters = { JsonOptionsCache.ReadOnlyMemoryConverter }, }; public static HttpRequestMessage CreateGetRequest(string url, object? payload = null) @@ -33,9 +31,20 @@ public static HttpRequestMessage CreatePostRequest(string url, object? payload = }; } - public static HttpRequestMessage CreateDeleteRequest(string url) + public static HttpRequestMessage CreateDeleteRequest(string url, object? payload = null) { - return new(HttpMethod.Delete, url); + return new(HttpMethod.Delete, url) + { + Content = GetJsonContent(payload) + }; + } + + public static HttpRequestMessage CreatePutRequest(string url, object? payload = null) + { + return new(HttpMethod.Put, url) + { + Content = GetJsonContent(payload) + }; } private static StringContent? GetJsonContent(object? 
payload) diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateCreateCollectionSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateCreateCollectionSchemaRequest.cs new file mode 100644 index 000000000000..96015b5323b7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateCreateCollectionSchemaRequest.cs @@ -0,0 +1,36 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateCreateCollectionSchemaRequest +{ + private const string ApiRoute = "schema"; + + [JsonConstructor] + public WeaviateCreateCollectionSchemaRequest() { } + + public WeaviateCreateCollectionSchemaRequest(WeaviateCollectionSchema collectionSchema) + { + this.CollectionName = collectionSchema.CollectionName; + this.VectorConfigurations = collectionSchema.VectorConfigurations; + this.Properties = collectionSchema.Properties; + } + + [JsonPropertyName("class")] + public string? CollectionName { get; set; } + + [JsonPropertyName("vectorConfig")] + public Dictionary? VectorConfigurations { get; set; } + + [JsonPropertyName("properties")] + public List? Properties { get; set; } + + public HttpRequestMessage Build() + { + return HttpRequest.CreatePostRequest(ApiRoute, this); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteCollectionSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteCollectionSchemaRequest.cs new file mode 100644 index 000000000000..4cdbaa9fd8da --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteCollectionSchemaRequest.cs @@ -0,0 +1,19 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateDeleteCollectionSchemaRequest(string collectionName) +{ + private const string ApiRoute = "schema"; + + [JsonIgnore] + public string CollectionName { get; set; } = collectionName; + + public HttpRequestMessage Build() + { + return HttpRequest.CreateDeleteRequest($"{ApiRoute}/{this.CollectionName}"); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectBatchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectBatchRequest.cs new file mode 100644 index 000000000000..c7948e1ae530 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectBatchRequest.cs @@ -0,0 +1,27 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateDeleteObjectBatchRequest +{ + private const string ApiRoute = "batch/objects"; + + [JsonConstructor] + public WeaviateDeleteObjectBatchRequest() { } + + public WeaviateDeleteObjectBatchRequest(WeaviateQueryMatch match) + { + this.Match = match; + } + + [JsonPropertyName("match")] + public WeaviateQueryMatch? Match { get; set; } + + public HttpRequestMessage Build() + { + return HttpRequest.CreateDeleteRequest(ApiRoute, this); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectRequest.cs new file mode 100644 index 000000000000..e88b64b1b3fe --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateDeleteObjectRequest.cs @@ -0,0 +1,23 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateDeleteObjectRequest(string collectionName, Guid id) +{ + private const string ApiRoute = "objects"; + + [JsonIgnore] + public string CollectionName { get; set; } = collectionName; + + [JsonIgnore] + public Guid Id { get; set; } = id; + + public HttpRequestMessage Build() + { + return HttpRequest.CreateDeleteRequest($"{ApiRoute}/{this.CollectionName}/{this.Id}"); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionObjectRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionObjectRequest.cs new file mode 100644 index 000000000000..5ddb40f438a1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionObjectRequest.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateGetCollectionObjectRequest(string collectionName, Guid id, bool includeVectors) +{ + private const string ApiRoute = "objects"; + private const string IncludeQueryParameterName = "include"; + private const string IncludeVectorQueryParameterValue = "vector"; + + [JsonIgnore] + public string CollectionName { get; set; } = collectionName; + + [JsonIgnore] + public Guid Id { get; set; } = id; + + [JsonIgnore] + public bool IncludeVectors { get; set; } = includeVectors; + + public HttpRequestMessage Build() + { + var uri = $"{ApiRoute}/{this.CollectionName}/{this.Id}"; + + if (this.IncludeVectors) + { + uri += $"?{IncludeQueryParameterName}={IncludeVectorQueryParameterValue}"; + } + + return HttpRequest.CreateGetRequest(uri); + } +} diff --git 
a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaRequest.cs new file mode 100644 index 000000000000..d863f4dc74d6 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaRequest.cs @@ -0,0 +1,19 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateGetCollectionSchemaRequest(string collectionName) +{ + private const string ApiRoute = "schema"; + + [JsonIgnore] + public string CollectionName { get; set; } = collectionName; + + public HttpRequestMessage Build() + { + return HttpRequest.CreateGetRequest($"{ApiRoute}/{this.CollectionName}"); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaResponse.cs new file mode 100644 index 000000000000..7277ac7d929f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionSchemaResponse.cs @@ -0,0 +1,11 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateGetCollectionSchemaResponse +{ + [JsonPropertyName("class")] + public string? CollectionName { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs new file mode 100644 index 000000000000..f31017ca8685 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsRequest.cs @@ -0,0 +1,15 @@ +๏ปฟ// Copyright (c) Microsoft. 
All rights reserved. + +using System.Net.Http; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateGetCollectionsRequest +{ + private const string ApiRoute = "schema"; + + public HttpRequestMessage Build() + { + return HttpRequest.CreateGetRequest(ApiRoute, this); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsResponse.cs new file mode 100644 index 000000000000..84c1b5d6611c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateGetCollectionsResponse.cs @@ -0,0 +1,12 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateGetCollectionsResponse +{ + [JsonPropertyName("classes")] + public List? Collections { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchRequest.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchRequest.cs new file mode 100644 index 000000000000..4a22afbc26ed --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchRequest.cs @@ -0,0 +1,32 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Net.Http; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateUpsertCollectionObjectBatchRequest +{ + private const string ApiRoute = "batch/objects"; + + [JsonConstructor] + public WeaviateUpsertCollectionObjectBatchRequest() { } + + public WeaviateUpsertCollectionObjectBatchRequest(List collectionObjects) + { + this.CollectionObjects = collectionObjects; + } + + [JsonPropertyName("fields")] + public List Fields { get; set; } = [WeaviateConstants.ReservedKeyPropertyName]; + + [JsonPropertyName("objects")] + public List? CollectionObjects { get; set; } + + public HttpRequestMessage Build() + { + return HttpRequest.CreatePostRequest(ApiRoute, this); + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchResponse.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchResponse.cs new file mode 100644 index 000000000000..1e540cdc5872 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/HttpV2/WeaviateUpsertCollectionObjectBatchResponse.cs @@ -0,0 +1,15 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateUpsertCollectionObjectBatchResponse +{ + [JsonPropertyName("id")] + public Guid Id { get; set; } + + [JsonPropertyName("result")] + public WeaviateOperationResult? 
Result { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs new file mode 100644 index 000000000000..66ca41ca4a54 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/IWeaviateVectorStoreRecordCollectionFactory.cs @@ -0,0 +1,28 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Interface for constructing Weaviate instances when using to retrieve these. +/// +public interface IWeaviateVectorStoreRecordCollectionFactory +{ + /// + /// Constructs a new instance of the . + /// + /// The data type of the record key. + /// The data model to use for adding, updating and retrieving data from storage. + /// that is used to interact with Weaviate API. + /// The name of the collection to connect to. + /// An optional record definition that defines the schema of the record type. If not present, attributes on will be used. + /// The new instance of . + IVectorStoreRecordCollection CreateVectorStoreRecordCollection( + HttpClient httpClient, + string name, + VectorStoreRecordDefinition? vectorStoreRecordDefinition) + where TKey : notnull + where TRecord : class; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs new file mode 100644 index 000000000000..e0f403ddb0e8 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchema.cs @@ -0,0 +1,24 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateCollectionSchema +{ + [JsonConstructor] + public WeaviateCollectionSchema(string collectionName) + { + this.CollectionName = collectionName; + } + + [JsonPropertyName("class")] + public string CollectionName { get; set; } + + [JsonPropertyName("vectorConfig")] + public Dictionary VectorConfigurations { get; set; } = []; + + [JsonPropertyName("properties")] + public List Properties { get; set; } = []; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaProperty.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaProperty.cs new file mode 100644 index 000000000000..d8719fe66764 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaProperty.cs @@ -0,0 +1,21 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateCollectionSchemaProperty +{ + [JsonPropertyName("name")] + public string? Name { get; set; } + + [JsonPropertyName("dataType")] + public List DataType { get; set; } = []; + + [JsonPropertyName("indexFilterable")] + public bool IndexFilterable { get; set; } + + [JsonPropertyName("indexSearchable")] + public bool IndexSearchable { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs new file mode 100644 index 000000000000..75bd33471eb7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorConfig.cs @@ -0,0 +1,20 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateCollectionSchemaVectorConfig +{ + private const string DefaultVectorizer = "none"; + + [JsonPropertyName("vectorizer")] + public Dictionary Vectorizer { get; set; } = new() { [DefaultVectorizer] = null }; + + [JsonPropertyName("vectorIndexType")] + public string? VectorIndexType { get; set; } + + [JsonPropertyName("vectorIndexConfig")] + public WeaviateCollectionSchemaVectorIndexConfig? VectorIndexConfig { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorIndexConfig.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorIndexConfig.cs new file mode 100644 index 000000000000..49d01896d395 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateCollectionSchemaVectorIndexConfig.cs @@ -0,0 +1,11 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateCollectionSchemaVectorIndexConfig +{ + [JsonPropertyName("distance")] + public string? Distance { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResult.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResult.cs new file mode 100644 index 000000000000..fc76ac8c2435 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResult.cs @@ -0,0 +1,20 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateOperationResult +{ + private const string Success = nameof(Success); + + [JsonPropertyName("errors")] + public WeaviateOperationResultErrors? Errors { get; set; } + + [JsonPropertyName("status")] + public string? Status { get; set; } + + [JsonIgnore] + public bool? IsSuccess => this.Status?.Equals(Success, StringComparison.OrdinalIgnoreCase); +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultError.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultError.cs new file mode 100644 index 000000000000..51470a0af40a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultError.cs @@ -0,0 +1,11 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateOperationResultError +{ + [JsonPropertyName("message")] + public string? Message { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultErrors.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultErrors.cs new file mode 100644 index 000000000000..c76555f2914b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateOperationResultErrors.cs @@ -0,0 +1,12 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal class WeaviateOperationResultErrors +{ + [JsonPropertyName("error")] + public List? 
Errors { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatch.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatch.cs new file mode 100644 index 000000000000..4bb6b431807d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatch.cs @@ -0,0 +1,14 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateQueryMatch +{ + [JsonPropertyName("class")] + public string? CollectionName { get; set; } + + [JsonPropertyName("where")] + public WeaviateQueryMatchWhereClause? WhereClause { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatchWhereClause.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatchWhereClause.cs new file mode 100644 index 000000000000..b2423ab868fc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/ModelV2/WeaviateQueryMatchWhereClause.cs @@ -0,0 +1,18 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateQueryMatchWhereClause +{ + [JsonPropertyName("operator")] + public string? Operator { get; set; } + + [JsonPropertyName("path")] + public List Path { get; set; } = []; + + [JsonPropertyName("valueTextArray")] + public List Values { get; set; } = []; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs new file mode 100644 index 000000000000..57fa4695b0bf --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateConstants.cs @@ -0,0 +1,18 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateConstants +{ + /// Reserved key property name in Weaviate. + internal const string ReservedKeyPropertyName = "id"; + + /// Reserved data property name in Weaviate. + internal const string ReservedDataPropertyName = "properties"; + + /// Reserved vector property name in Weaviate. + internal const string ReservedVectorPropertyName = "vectors"; + + /// Reserved collection property name in Weaviate. + internal const string ReservedCollectionPropertyName = "class"; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs new file mode 100644 index 000000000000..9275421a9659 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateGenericDataModelMapper.cs @@ -0,0 +1,152 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// A mapper that maps between the generic Semantic Kernel data model and the model that the data is stored under, within Weaviate. +/// +internal sealed class WeaviateGenericDataModelMapper : IVectorStoreRecordMapper, JsonObject> +{ + /// The name of the Weaviate collection. + private readonly string _collectionName; + + /// A property of record definition. + private readonly VectorStoreRecordKeyProperty _keyProperty; + + /// A collection of properties of record definition. + private readonly IReadOnlyList _dataProperties; + + /// A collection of properties of record definition. + private readonly IReadOnlyList _vectorProperties; + + /// A dictionary that maps from a property name to the storage name. 
+ private readonly IReadOnlyDictionary _storagePropertyNames; + + /// A for serialization/deserialization of record properties. + private readonly JsonSerializerOptions _jsonSerializerOptions; + + /// + /// Initializes a new instance of the class. + /// + /// The name of the Weaviate collection + /// A property of record definition. + /// A collection of properties of record definition. + /// A collection of properties of record definition. + /// A dictionary that maps from a property name to the storage name. + /// A for serialization/deserialization of record properties. + public WeaviateGenericDataModelMapper( + string collectionName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList dataProperties, + IReadOnlyList vectorProperties, + IReadOnlyDictionary storagePropertyNames, + JsonSerializerOptions jsonSerializerOptions) + { + Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNull(keyProperty); + Verify.NotNull(dataProperties); + Verify.NotNull(vectorProperties); + Verify.NotNull(storagePropertyNames); + Verify.NotNull(jsonSerializerOptions); + + this._collectionName = collectionName; + this._keyProperty = keyProperty; + this._dataProperties = dataProperties; + this._vectorProperties = vectorProperties; + this._storagePropertyNames = storagePropertyNames; + this._jsonSerializerOptions = jsonSerializerOptions; + } + + public JsonObject MapFromDataToStorageModel(VectorStoreGenericDataModel dataModel) + { + Verify.NotNull(dataModel); + + // Transform generic data model to Weaviate object model. + var weaviateObjectModel = new JsonObject + { + { WeaviateConstants.ReservedCollectionPropertyName, JsonValue.Create(this._collectionName) }, + { WeaviateConstants.ReservedKeyPropertyName, dataModel.Key }, + { WeaviateConstants.ReservedDataPropertyName, new JsonObject() }, + { WeaviateConstants.ReservedVectorPropertyName, new JsonObject() }, + }; + + // Populate data properties. 
+ foreach (var property in this._dataProperties) + { + if (dataModel.Data is not null && dataModel.Data.TryGetValue(property.DataModelPropertyName, out var dataValue)) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + + weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![storagePropertyName] = dataValue is not null ? + JsonSerializer.SerializeToNode(dataValue, property.PropertyType, this._jsonSerializerOptions) : + null; + } + } + + // Populate vector properties. + foreach (var property in this._vectorProperties) + { + if (dataModel.Vectors is not null && dataModel.Vectors.TryGetValue(property.DataModelPropertyName, out var vectorValue)) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + + weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![storagePropertyName] = vectorValue is not null ? + JsonSerializer.SerializeToNode(vectorValue, property.PropertyType, this._jsonSerializerOptions) : + null; + } + } + + return weaviateObjectModel; + } + + public VectorStoreGenericDataModel MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + { + Verify.NotNull(storageModel); + + // Create variables to store the response properties. + var key = storageModel[WeaviateConstants.ReservedKeyPropertyName]?.GetValue(); + + if (!key.HasValue) + { + throw new VectorStoreRecordMappingException("No key property was found in the record retrieved from storage."); + } + + var dataProperties = new Dictionary(); + var vectorProperties = new Dictionary(); + + // Populate data properties. 
+ foreach (var property in this._dataProperties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + var jsonObject = storageModel[WeaviateConstants.ReservedDataPropertyName] as JsonObject; + + if (jsonObject is not null && jsonObject.TryGetPropertyValue(storagePropertyName, out var dataValue)) + { + dataProperties.Add(property.DataModelPropertyName, dataValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); + } + } + + // Populate vector properties. + if (options.IncludeVectors) + { + foreach (var property in this._vectorProperties) + { + var storagePropertyName = this._storagePropertyNames[property.DataModelPropertyName]; + var jsonObject = storageModel[WeaviateConstants.ReservedVectorPropertyName] as JsonObject; + + if (jsonObject is not null && jsonObject.TryGetPropertyValue(storagePropertyName, out var vectorValue)) + { + vectorProperties.Add(property.DataModelPropertyName, vectorValue.Deserialize(property.PropertyType, this._jsonSerializerOptions)); + } + } + } + + return new VectorStoreGenericDataModel(key.Value) { Data = dataProperties, Vectors = vectorProperties }; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs new file mode 100644 index 000000000000..57d10183d7de --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateKernelBuilderExtensions.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Weaviate instances on the . +/// +public static class WeaviateKernelBuilderExtensions +{ + /// + /// Register a Weaviate with the specified service ID. + /// + /// The builder to register the on. 
+ /// + /// that is used to interact with Weaviate API. + /// should point to remote or local cluster and API key can be configured via . + /// It's also possible to provide these parameters via . + /// + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// The kernel builder. + public static IKernelBuilder AddWeaviateVectorStore( + this IKernelBuilder builder, + HttpClient? httpClient = default, + WeaviateVectorStoreOptions? options = default, + string? serviceId = default) + { + builder.Services.AddWeaviateVectorStore(httpClient, options, serviceId); + return builder; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs index a5cca838cb3b..f6c4364222ab 100644 --- a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateMemoryStore.cs @@ -17,7 +17,6 @@ using Microsoft.Extensions.Logging.Abstractions; using Microsoft.SemanticKernel.Http; using Microsoft.SemanticKernel.Memory; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Connectors.Weaviate; @@ -53,7 +52,6 @@ public partial class WeaviateMemoryStore : IMemoryStore { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Converters = { JsonOptionsCache.ReadOnlyMemoryConverter } }; private readonly HttpClient _httpClient; diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs new file mode 100644 index 000000000000..cdaff1ddd070 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateServiceCollectionExtensions.cs @@ -0,0 +1,45 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods to register Weaviate instances on an +/// +public static class WeaviateServiceCollectionExtensions +{ + /// + /// Register a Weaviate with the specified service ID. + /// + /// The to register the on. + /// + /// that is used to interact with Weaviate API. + /// should point to remote or local cluster and API key can be configured via . + /// It's also possible to provide these parameters via . + /// + /// Optional options to further configure the . + /// An optional service id to use as the service key. + /// Service collection. + public static IServiceCollection AddWeaviateVectorStore( + this IServiceCollection services, + HttpClient? httpClient = default, + WeaviateVectorStoreOptions? options = default, + string? serviceId = default) + { + services.AddKeyedTransient( + serviceId, + (sp, obj) => + { + var selectedHttpClient = HttpClientProvider.GetHttpClient(httpClient, sp); + var selectedOptions = options ?? sp.GetService(); + return new WeaviateVectorStore(selectedHttpClient, options); + }); + + return services; + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs new file mode 100644 index 000000000000..ddd605236c27 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStore.cs @@ -0,0 +1,93 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Threading; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Class for accessing the list of collections in a Weaviate vector store. +/// +/// +/// This class can be used with collections of any schema type, but requires you to provide schema information when getting a collection. +/// +public sealed class WeaviateVectorStore : IVectorStore +{ + /// that is used to interact with Weaviate API. + private readonly HttpClient _httpClient; + + /// Optional configuration options for this class. + private readonly WeaviateVectorStoreOptions _options; + + /// + /// Initializes a new instance of the class. + /// + /// + /// that is used to interact with Weaviate API. + /// should point to remote or local cluster and API key can be configured via . + /// It's also possible to provide these parameters via . + /// + /// Optional configuration options for this class. + public WeaviateVectorStore(HttpClient httpClient, WeaviateVectorStoreOptions? options = null) + { + Verify.NotNull(httpClient); + + this._httpClient = httpClient; + this._options = options ?? new(); + } + + /// + public IVectorStoreRecordCollection GetCollection(string name, VectorStoreRecordDefinition? 
vectorStoreRecordDefinition = null) + where TKey : notnull + where TRecord : class + { + if (typeof(TKey) != typeof(Guid)) + { + throw new NotSupportedException($"Only {nameof(Guid)} key is supported."); + } + + if (this._options.VectorStoreCollectionFactory is not null) + { + return this._options.VectorStoreCollectionFactory.CreateVectorStoreRecordCollection( + this._httpClient, + name, + vectorStoreRecordDefinition); + } + + var recordCollection = new WeaviateVectorStoreRecordCollection( + this._httpClient, + name, + new() + { + VectorStoreRecordDefinition = vectorStoreRecordDefinition, + Endpoint = this._options.Endpoint, + ApiKey = this._options.ApiKey + }) as IVectorStoreRecordCollection; + + return recordCollection!; + } + + /// + public async IAsyncEnumerable ListCollectionNamesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + using var request = new WeaviateGetCollectionsRequest().Build(); + + var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); + var responseContent = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + var collectionResponse = JsonSerializer.Deserialize(responseContent); + + if (collectionResponse?.Collections is not null) + { + foreach (var collection in collectionResponse.Collections) + { + yield return collection.CollectionName; + } + } + } +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs new file mode 100644 index 000000000000..a68150d8dd98 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreCollectionCreateMapping.cs @@ -0,0 +1,196 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Class to construct Weaviate collection schema with configuration for data and vector properties. +/// More information here: . +/// +internal static class WeaviateVectorStoreCollectionCreateMapping +{ + /// + /// Maps record type properties to Weaviate collection schema for collection creation. + /// + /// The name of the vector store collection. + /// Collection of record data properties. + /// Collection of record vector properties. + /// A dictionary that maps from a property name to the storage name that should be used when serializing it to JSON for data and vector properties. + /// Weaviate collection schema. + public static WeaviateCollectionSchema MapToSchema( + string collectionName, + IEnumerable dataProperties, + IEnumerable vectorProperties, + IReadOnlyDictionary storagePropertyNames) + { + var schema = new WeaviateCollectionSchema(collectionName); + + // Handle data properties. + foreach (var property in dataProperties) + { + schema.Properties.Add(new WeaviateCollectionSchemaProperty + { + Name = storagePropertyNames[property.DataModelPropertyName], + DataType = [MapType(property.PropertyType)], + IndexFilterable = property.IsFilterable, + IndexSearchable = property.IsFullTextSearchable + }); + } + + // Handle vector properties. 
+ foreach (var property in vectorProperties) + { + var vectorPropertyName = storagePropertyNames[property.DataModelPropertyName]; + schema.VectorConfigurations.Add(vectorPropertyName, new WeaviateCollectionSchemaVectorConfig + { + VectorIndexType = MapIndexKind(property.IndexKind, vectorPropertyName), + VectorIndexConfig = new WeaviateCollectionSchemaVectorIndexConfig + { + Distance = MapDistanceFunction(property.DistanceFunction, vectorPropertyName) + } + }); + } + + return schema; + } + + #region private + + /// + /// Maps record vector property index kind to Weaviate index kind. + /// More information here: . + /// + private static string MapIndexKind(string? indexKind, string vectorPropertyName) + { + const string Hnsw = "hnsw"; + const string Flat = "flat"; + const string Dynamic = "dynamic"; + + // If index kind is not provided, use default one. + if (string.IsNullOrWhiteSpace(indexKind)) + { + return Hnsw; + } + + return indexKind switch + { + IndexKind.Hnsw => Hnsw, + IndexKind.Flat => Flat, + IndexKind.Dynamic => Dynamic, + _ => throw new InvalidOperationException( + $"Index kind '{indexKind}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Weaviate VectorStore. " + + $"Supported index kinds: {string.Join(", ", + IndexKind.Hnsw, + IndexKind.Flat, + IndexKind.Dynamic)}") + }; + } + + /// + /// Maps record vector property distance function to Weaviate distance function. + /// More information here: . + /// + private static string MapDistanceFunction(string? distanceFunction, string vectorPropertyName) + { + const string Cosine = "cosine"; + const string Dot = "dot"; + const string EuclideanSquared = "l2-squared"; + const string Hamming = "hamming"; + const string Manhattan = "manhattan"; + + // If distance function is not provided, use default one. 
+ if (string.IsNullOrWhiteSpace(distanceFunction)) + { + return Cosine; + } + + return distanceFunction switch + { + DistanceFunction.CosineDistance => Cosine, + DistanceFunction.DotProductSimilarity => Dot, + DistanceFunction.EuclideanSquaredDistance => EuclideanSquared, + DistanceFunction.Hamming => Hamming, + DistanceFunction.ManhattanDistance => Manhattan, + _ => throw new InvalidOperationException( + $"Distance function '{distanceFunction}' on {nameof(VectorStoreRecordVectorProperty)} '{vectorPropertyName}' is not supported by the Weaviate VectorStore. " + + $"Supported distance functions: {string.Join(", ", + DistanceFunction.CosineDistance, + DistanceFunction.DotProductSimilarity, + DistanceFunction.EuclideanSquaredDistance, + DistanceFunction.Hamming, + DistanceFunction.ManhattanDistance)}") + }; + } + + /// + /// Maps record property type to Weaviate data type taking into account if the type is a collection or single value. + /// + private static string MapType(Type type) + { + // Check if the type is a collection. + if (typeof(IEnumerable).IsAssignableFrom(type) && type != typeof(string)) + { + var elementType = GetCollectionElementType(type); + + // If type is a collection, handle collection element type. + return MapType(elementType, isCollection: true); + } + + // If type is not a collection, handle single type. + return MapType(type, isCollection: false); + } + + /// + /// Maps record property type to Weaviate data type. + /// More information here: . + /// + private static string MapType(Type type, bool isCollection) + { + return type switch + { + Type t when t == typeof(string) => isCollection ? "text[]" : "text", + Type t when t == typeof(int) || t == typeof(long) || t == typeof(short) || t == typeof(byte) || + t == typeof(int?) || t == typeof(long?) || t == typeof(short?) || t == typeof(byte?) => isCollection ? "int[]" : "int", + Type t when t == typeof(float) || t == typeof(double) || t == typeof(decimal) || + t == typeof(float?) 
|| t == typeof(double?) || t == typeof(decimal?) => isCollection ? "number[]" : "number", + Type t when t == typeof(DateTime) || t == typeof(DateTime?) || + t == typeof(DateTimeOffset) || t == typeof(DateTimeOffset?) => isCollection ? "date[]" : "date", + Type t when t == typeof(Guid) || t == typeof(Guid?) => isCollection ? "uuid[]" : "uuid", + Type t when t == typeof(bool) || t == typeof(bool?) => isCollection ? "boolean[]" : "boolean", + _ => isCollection ? "object[]" : "object", + }; + } + + /// + /// Gets the element type of a collection. + /// + /// + /// For example, when is , returned type will be generic parameter . + /// + private static Type GetCollectionElementType(Type type) + { + if (type.IsArray) + { + var elementType = type.GetElementType(); + + if (elementType is not null) + { + return elementType; + } + } + + if (type.IsGenericType) + { + return type.GetGenericArguments().First(); + } + + return typeof(object); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs new file mode 100644 index 000000000000..9feab8c9047d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreOptions.cs @@ -0,0 +1,29 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Options when creating a . +/// +public sealed class WeaviateVectorStoreOptions +{ + /// + /// An optional factory to use for constructing instances, if a custom record collection is required. + /// + public IWeaviateVectorStoreRecordCollectionFactory? VectorStoreCollectionFactory { get; init; } + + /// + /// Weaviate endpoint for remote or local cluster. + /// + public Uri? Endpoint { get; set; } = null; + + /// + /// Weaviate API key. 
+ /// + /// + /// This parameter is optional because authentication may be disabled in local clusters for testing purposes. + /// + public string? ApiKey { get; set; } = null; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs new file mode 100644 index 000000000000..ac2ec7439630 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollection.cs @@ -0,0 +1,420 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Runtime.CompilerServices; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Data; +using Microsoft.SemanticKernel.Http; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Service for storing and retrieving vector records, that uses Weaviate as the underlying storage. +/// +/// The data model to use for adding, updating and retrieving data from storage. +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix +public sealed class WeaviateVectorStoreRecordCollection : IVectorStoreRecordCollection where TRecord : class +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix +{ + /// The name of this database for telemetry purposes. + private const string DatabaseName = "Weaviate"; + + /// A set of types that a key on the provided model may have. + private static readonly HashSet s_supportedKeyTypes = + [ + typeof(Guid) + ]; + + /// A set of types that vectors on the provided model may have. 
+ private static readonly HashSet s_supportedVectorTypes = + [ + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?), + typeof(ReadOnlyMemory), + typeof(ReadOnlyMemory?) + ]; + + /// A set of types that data properties on the provided model may have. + private static readonly HashSet s_supportedDataTypes = + [ + typeof(string), + typeof(bool), + typeof(bool?), + typeof(int), + typeof(int?), + typeof(long), + typeof(long?), + typeof(short), + typeof(short?), + typeof(byte), + typeof(byte?), + typeof(float), + typeof(float?), + typeof(double), + typeof(double?), + typeof(decimal), + typeof(decimal?), + typeof(DateTime), + typeof(DateTime?), + typeof(DateTimeOffset), + typeof(DateTimeOffset?), + typeof(Guid), + typeof(Guid?) + ]; + + /// Default JSON serializer options. + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = + { + new WeaviateDateTimeOffsetConverter(), + new WeaviateNullableDateTimeOffsetConverter() + } + }; + + /// that is used to interact with Weaviate API. + private readonly HttpClient _httpClient; + + /// Optional configuration options for this class. + private readonly WeaviateVectorStoreRecordCollectionOptions _options; + + /// A helper to access property information for the current data model and record definition. + private readonly VectorStoreRecordPropertyReader _propertyReader; + + /// The mapper to use when mapping between the consumer data model and the Weaviate record. + private readonly IVectorStoreRecordMapper _mapper; + + /// Weaviate endpoint. + private readonly Uri _endpoint; + + /// Weaviate API key. + private readonly string? _apiKey; + + /// + public string CollectionName { get; } + + /// + /// Initializes a new instance of the class. + /// + /// + /// that is used to interact with Weaviate API. 
+ /// should point to remote or local cluster and API key can be configured via . + /// It's also possible to provide these parameters via . + /// + /// The name of the collection that this will access. + /// Optional configuration options for this class. + public WeaviateVectorStoreRecordCollection( + HttpClient httpClient, + string collectionName, + WeaviateVectorStoreRecordCollectionOptions? options = default) + { + // Verify. + Verify.NotNull(httpClient); + Verify.NotNullOrWhiteSpace(collectionName); + + var endpoint = (options?.Endpoint ?? httpClient.BaseAddress) ?? throw new ArgumentException($"Weaviate endpoint should be provided via HttpClient.BaseAddress property or {nameof(WeaviateVectorStoreRecordCollectionOptions)} options parameter."); + + // Assign. + this._httpClient = httpClient; + this._endpoint = endpoint; + this.CollectionName = collectionName; + this._options = options ?? new(); + this._apiKey = this._options.ApiKey; + this._propertyReader = new VectorStoreRecordPropertyReader( + typeof(TRecord), + this._options.VectorStoreRecordDefinition, + new() + { + RequiresAtLeastOneVector = false, + SupportsMultipleKeys = false, + SupportsMultipleVectors = true, + JsonSerializerOptions = s_jsonSerializerOptions + }); + + // Validate property types. + this._propertyReader.VerifyKeyProperties(s_supportedKeyTypes); + this._propertyReader.VerifyDataProperties(s_supportedDataTypes, supportEnumerable: true); + this._propertyReader.VerifyVectorProperties(s_supportedVectorTypes); + + // Assign mapper. 
+ this._mapper = this.InitializeMapper(); + } + + /// + public Task CollectionExistsAsync(CancellationToken cancellationToken = default) + { + const string OperationName = "GetCollectionSchema"; + + return this.RunOperationAsync(OperationName, async () => + { + var request = new WeaviateGetCollectionSchemaRequest(this.CollectionName).Build(); + + var response = await this + .ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken) + .ConfigureAwait(false); + + return response != null; + }); + } + + /// + public Task CreateCollectionAsync(CancellationToken cancellationToken = default) + { + const string OperationName = "CreateCollectionSchema"; + + return this.RunOperationAsync(OperationName, () => + { + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + this.CollectionName, + this._propertyReader.DataProperties, + this._propertyReader.VectorProperties, + this._propertyReader.JsonPropertyNamesMap); + + var request = new WeaviateCreateCollectionSchemaRequest(schema).Build(); + + return this.ExecuteRequestAsync(request, cancellationToken); + }); + } + + /// + public async Task CreateCollectionIfNotExistsAsync(CancellationToken cancellationToken = default) + { + if (!await this.CollectionExistsAsync(cancellationToken).ConfigureAwait(false)) + { + await this.CreateCollectionAsync(cancellationToken).ConfigureAwait(false); + } + } + + /// + public Task DeleteCollectionAsync(CancellationToken cancellationToken = default) + { + const string OperationName = "DeleteCollectionSchema"; + + return this.RunOperationAsync(OperationName, () => + { + var request = new WeaviateDeleteCollectionSchemaRequest(this.CollectionName).Build(); + + return this.ExecuteRequestAsync(request, cancellationToken); + }); + } + + /// + public Task DeleteAsync(Guid key, DeleteRecordOptions? 
options = null, CancellationToken cancellationToken = default) + { + const string OperationName = "DeleteObject"; + + return this.RunOperationAsync(OperationName, () => + { + var request = new WeaviateDeleteObjectRequest(this.CollectionName, key).Build(); + + return this.ExecuteRequestAsync(request, cancellationToken); + }); + } + + /// + public Task DeleteBatchAsync(IEnumerable keys, DeleteRecordOptions? options = null, CancellationToken cancellationToken = default) + { + const string OperationName = "DeleteObjectBatch"; + const string ContainsAnyOperator = "ContainsAny"; + + return this.RunOperationAsync(OperationName, () => + { + var match = new WeaviateQueryMatch + { + CollectionName = this.CollectionName, + WhereClause = new WeaviateQueryMatchWhereClause + { + Operator = ContainsAnyOperator, + Path = [WeaviateConstants.ReservedKeyPropertyName], + Values = keys.Select(key => key.ToString()).ToList() + } + }; + + var request = new WeaviateDeleteObjectBatchRequest(match).Build(); + + return this.ExecuteRequestAsync(request, cancellationToken); + }); + } + + /// + public Task GetAsync(Guid key, GetRecordOptions? options = null, CancellationToken cancellationToken = default) + { + const string OperationName = "GetCollectionObject"; + + return this.RunOperationAsync(OperationName, async () => + { + var includeVectors = options?.IncludeVectors is true; + var request = new WeaviateGetCollectionObjectRequest(this.CollectionName, key, includeVectors).Build(); + + var jsonObject = await this.ExecuteRequestWithNotFoundHandlingAsync(request, cancellationToken).ConfigureAwait(false); + + if (jsonObject is null) + { + return null; + } + + return VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromStorageToDataModel(jsonObject!, new() { IncludeVectors = includeVectors })); + }); + } + + /// + public async IAsyncEnumerable GetBatchAsync( + IEnumerable keys, + GetRecordOptions? 
options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var tasks = keys.Select(key => this.GetAsync(key, options, cancellationToken)); + + var records = await Task.WhenAll(tasks).ConfigureAwait(false); + + foreach (var record in records) + { + if (record is not null) + { + yield return record; + } + } + } + + /// + public async Task UpsertAsync(TRecord record, UpsertRecordOptions? options = null, CancellationToken cancellationToken = default) + { + return await this.UpsertBatchAsync([record], options, cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + } + + /// + public async IAsyncEnumerable UpsertBatchAsync( + IEnumerable records, + UpsertRecordOptions? options = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + const string OperationName = "UpsertCollectionObject"; + + var responses = await this.RunOperationAsync(OperationName, async () => + { + var jsonObjects = records.Select(record => VectorStoreErrorHandler.RunModelConversion( + DatabaseName, + this.CollectionName, + OperationName, + () => this._mapper.MapFromDataToStorageModel(record))).ToList(); + + var request = new WeaviateUpsertCollectionObjectBatchRequest(jsonObjects).Build(); + + return await this.ExecuteRequestAsync>(request, cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + + if (responses is not null) + { + foreach (var response in responses) + { + if (response?.Result?.IsSuccess is true) + { + yield return response.Id; + } + } + } + } + + #region private + + private Task ExecuteRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + request.RequestUri = new Uri(this._endpoint, request.RequestUri!); + + if (!string.IsNullOrWhiteSpace(this._apiKey)) + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", this._apiKey); + } + + return this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken); + } + + 
private async Task ExecuteRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var response = await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); + + var responseContent = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); + + return JsonSerializer.Deserialize(responseContent, s_jsonSerializerOptions); + } + + private async Task ExecuteRequestWithNotFoundHandlingAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + try + { + return await this.ExecuteRequestAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (HttpOperationException ex) when (ex.StatusCode == HttpStatusCode.NotFound) + { + return default; + } + } + + private async Task RunOperationAsync(string operationName, Func> operation) + { + try + { + return await operation.Invoke().ConfigureAwait(false); + } + catch (Exception ex) + { + throw new VectorStoreOperationException("Call to vector store failed.", ex) + { + VectorStoreType = DatabaseName, + CollectionName = this.CollectionName, + OperationName = operationName + }; + } + } + + /// + /// Returns custom mapper, generic data model mapper or default record mapper. 
+ /// + private IVectorStoreRecordMapper InitializeMapper() + { + if (this._options.JsonObjectCustomMapper is not null) + { + return this._options.JsonObjectCustomMapper; + } + + if (typeof(TRecord) == typeof(VectorStoreGenericDataModel)) + { + var mapper = new WeaviateGenericDataModelMapper( + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.DataProperties, + this._propertyReader.VectorProperties, + this._propertyReader.JsonPropertyNamesMap, + s_jsonSerializerOptions); + + return (mapper as IVectorStoreRecordMapper)!; + } + + return new WeaviateVectorStoreRecordMapper( + this.CollectionName, + this._propertyReader.KeyProperty, + this._propertyReader.DataProperties, + this._propertyReader.VectorProperties, + this._propertyReader.JsonPropertyNamesMap, + s_jsonSerializerOptions); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs new file mode 100644 index 000000000000..935faaa56c0f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordCollectionOptions.cs @@ -0,0 +1,41 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +/// +/// Options when creating a . +/// +public sealed class WeaviateVectorStoreRecordCollectionOptions where TRecord : class +{ + /// + /// Gets or sets an optional custom mapper to use when converting between the data model and Weaviate record. + /// + public IVectorStoreRecordMapper? JsonObjectCustomMapper { get; init; } = null; + + /// + /// Gets or sets an optional record definition that defines the schema of the record type. + /// + /// + /// If not provided, the schema will be inferred from the record model class using reflection. 
+ /// In this case, the record model properties must be annotated with the appropriate attributes to indicate their usage. + /// See , and . + /// + public VectorStoreRecordDefinition? VectorStoreRecordDefinition { get; init; } = null; + + /// + /// Weaviate endpoint for remote or local cluster. + /// + public Uri? Endpoint { get; set; } = null; + + /// + /// Weaviate API key. + /// + /// + /// This parameter is optional because authentication may be disabled in local clusters for testing purposes. + /// + public string? ApiKey { get; set; } = null; +} diff --git a/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs new file mode 100644 index 000000000000..9fbc346e6f25 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Memory.Weaviate/WeaviateVectorStoreRecordMapper.cs @@ -0,0 +1,126 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; + +namespace Microsoft.SemanticKernel.Connectors.Weaviate; + +internal sealed class WeaviateVectorStoreRecordMapper : IVectorStoreRecordMapper where TRecord : class +{ + private readonly string _collectionName; + + private readonly string _keyProperty; + + private readonly IReadOnlyList _dataProperties; + + private readonly IReadOnlyList _vectorProperties; + + private readonly IReadOnlyDictionary _storagePropertyNames; + + private readonly JsonSerializerOptions _jsonSerializerOptions; + + public WeaviateVectorStoreRecordMapper( + string collectionName, + VectorStoreRecordKeyProperty keyProperty, + IReadOnlyList dataProperties, + IReadOnlyList vectorProperties, + IReadOnlyDictionary storagePropertyNames, + JsonSerializerOptions jsonSerializerOptions) + { + Verify.NotNullOrWhiteSpace(collectionName); + Verify.NotNull(keyProperty); + Verify.NotNull(dataProperties); + 
Verify.NotNull(vectorProperties); + Verify.NotNull(storagePropertyNames); + Verify.NotNull(jsonSerializerOptions); + + this._collectionName = collectionName; + this._storagePropertyNames = storagePropertyNames; + this._jsonSerializerOptions = jsonSerializerOptions; + + this._keyProperty = this._storagePropertyNames[keyProperty.DataModelPropertyName]; + this._dataProperties = dataProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); + this._vectorProperties = vectorProperties.Select(property => this._storagePropertyNames[property.DataModelPropertyName]).ToList(); + } + + public JsonObject MapFromDataToStorageModel(TRecord dataModel) + { + Verify.NotNull(dataModel); + + var jsonNodeDataModel = JsonSerializer.SerializeToNode(dataModel, this._jsonSerializerOptions)!; + + // Transform data model to Weaviate object model. + var weaviateObjectModel = new JsonObject + { + { WeaviateConstants.ReservedCollectionPropertyName, JsonValue.Create(this._collectionName) }, + { WeaviateConstants.ReservedKeyPropertyName, jsonNodeDataModel[this._keyProperty]!.DeepClone() }, + { WeaviateConstants.ReservedDataPropertyName, new JsonObject() }, + { WeaviateConstants.ReservedVectorPropertyName, new JsonObject() }, + }; + + // Populate data properties. + foreach (var property in this._dataProperties) + { + var node = jsonNodeDataModel[property]; + + if (node is not null) + { + weaviateObjectModel[WeaviateConstants.ReservedDataPropertyName]![property] = node.DeepClone(); + } + } + + // Populate vector properties. 
+ foreach (var property in this._vectorProperties) + { + var node = jsonNodeDataModel[property]; + + if (node is not null) + { + weaviateObjectModel[WeaviateConstants.ReservedVectorPropertyName]![property] = node.DeepClone(); + } + } + + return weaviateObjectModel; + } + + public TRecord MapFromStorageToDataModel(JsonObject storageModel, StorageToDataModelMapperOptions options) + { + Verify.NotNull(storageModel); + + // Transform Weaviate object model to data model. + var jsonNodeDataModel = new JsonObject + { + { this._keyProperty, storageModel[WeaviateConstants.ReservedKeyPropertyName]?.DeepClone() }, + }; + + // Populate data properties. + foreach (var property in this._dataProperties) + { + var node = storageModel[WeaviateConstants.ReservedDataPropertyName]?[property]; + + if (node is not null) + { + jsonNodeDataModel[property] = node.DeepClone(); + } + } + + // Populate vector properties. + if (options.IncludeVectors) + { + foreach (var property in this._vectorProperties) + { + var node = storageModel[WeaviateConstants.ReservedVectorPropertyName]?[property]; + + if (node is not null) + { + jsonNodeDataModel[property] = node.DeepClone(); + } + } + } + + return jsonNodeDataModel.Deserialize(this._jsonSerializerOptions)!; + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj new file mode 100644 index 000000000000..78afaac82621 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj @@ -0,0 +1,50 @@ +๏ปฟ + + + SemanticKernel.Connectors.Ollama.UnitTests + SemanticKernel.Connectors.Ollama.UnitTests + net8.0 + 12 + LatestMajor + true + enable + disable + false + CA2007,CA1861,VSTHRD111,CS1591,SKEXP0001,SKEXP0070 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + 
+ + + + + + + + + + + + + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..668044164ded --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs @@ -0,0 +1,59 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions; + +/// +/// Unit tests of . +/// +public class OllamaKernelBuilderExtensionsTests +{ + [Fact] + public void AddOllamaTextGenerationCreatesService() + { + var builder = Kernel.CreateBuilder(); + builder.AddOllamaTextGeneration("model", new Uri("http://localhost:11434")); + + var kernel = builder.Build(); + var service = kernel.GetRequiredService(); + + Assert.NotNull(kernel); + Assert.NotNull(service); + Assert.IsType(service); + } + + [Fact] + public void AddOllamaChatCompletionCreatesService() + { + var builder = Kernel.CreateBuilder(); + builder.AddOllamaChatCompletion("model", new Uri("http://localhost:11434")); + + var kernel = builder.Build(); + var service = kernel.GetRequiredService(); + + Assert.NotNull(kernel); + Assert.NotNull(service); + Assert.IsType(service); + } + + [Fact] + public void AddOllamaTextEmbeddingGenerationCreatesService() + { + var builder = Kernel.CreateBuilder(); + builder.AddOllamaTextEmbeddingGeneration("model", new Uri("http://localhost:11434")); + + var kernel = builder.Build(); + var service = kernel.GetRequiredService(); + + Assert.NotNull(kernel); + Assert.NotNull(service); + Assert.IsType(service); + } +} diff --git 
a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..2c3a4e79df04 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs @@ -0,0 +1,57 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.TextGeneration; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions; + +/// +/// Unit tests of . +/// +public class OllamaServiceCollectionExtensionsTests +{ + [Fact] + public void AddOllamaTextGenerationToServiceCollection() + { + var services = new ServiceCollection(); + services.AddOllamaTextGeneration("model", new Uri("http://localhost:11434")); + + var serviceProvider = services.BuildServiceProvider(); + var service = serviceProvider.GetRequiredService(); + + Assert.NotNull(service); + Assert.IsType(service); + } + + [Fact] + public void AddOllamaChatCompletionToServiceCollection() + { + var services = new ServiceCollection(); + services.AddOllamaChatCompletion("model", new Uri("http://localhost:11434")); + + var serviceProvider = services.BuildServiceProvider(); + var service = serviceProvider.GetRequiredService(); + + Assert.NotNull(service); + Assert.IsType(service); + } + + [Fact] + public void AddOllamaTextEmbeddingsGenerationToServiceCollection() + { + var services = new ServiceCollection(); + services.AddOllamaTextEmbeddingGeneration("model", new Uri("http://localhost:11434")); + + var serviceProvider = services.BuildServiceProvider(); + var service = serviceProvider.GetRequiredService(); + + 
Assert.NotNull(service); + Assert.IsType(service); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs new file mode 100644 index 000000000000..09fff4ab5d95 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs @@ -0,0 +1,260 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models.Chat; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Services; + +public sealed class OllamaChatCompletionTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + + public OllamaChatCompletionTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") }; + } + + [Fact] + public async Task ShouldSendPromptToServiceAsync() + { + //Arrange + var sut = new OllamaChatCompletionService( + "fake-model", + httpClient: this._httpClient); + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + + //Act + await sut.GetChatMessageContentsAsync(chat); + + //Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.Equal("fake-text", requestPayload.Messages!.First().Content); + } + + [Fact] 
+ public async Task ShouldHandleServiceResponseAsync() + { + //Arrange + var sut = new OllamaChatCompletionService( + "fake-model", + httpClient: this._httpClient); + + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + + //Act + var messages = await sut.GetChatMessageContentsAsync(chat); + + //Assert + Assert.NotNull(messages); + + var message = messages.SingleOrDefault(); + Assert.NotNull(message); + Assert.Equal("This is test completion response", message.Content); + } + + [Fact] + public async Task GetChatMessageContentsShouldHaveModelAndInnerContentAsync() + { + //Arrange + var sut = new OllamaChatCompletionService( + "phi3", + httpClient: this._httpClient); + + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + + //Act + var messages = await sut.GetChatMessageContentsAsync(chat); + + //Assert + Assert.NotNull(messages); + var message = messages.SingleOrDefault(); + Assert.NotNull(message); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Null(requestPayload.Options.Stop); + Assert.Null(requestPayload.Options.Temperature); + Assert.Null(requestPayload.Options.TopK); + Assert.Null(requestPayload.Options.TopP); + + Assert.NotNull(message.ModelId); + Assert.Equal("phi3", message.ModelId); + + // Ollama Sharp always perform streaming even for non-streaming calls, + // The inner content in this case is the full list of chunks returned by the Ollama Client. 
+ Assert.NotNull(message.InnerContent); + Assert.IsType>(message.InnerContent); + var innerContentList = message.InnerContent as List; + Assert.NotNull(innerContentList); + Assert.NotEmpty(innerContentList); + var lastMessage = innerContentList.Last(); + var doneMessageChunk = lastMessage as ChatDoneResponseStream; + Assert.NotNull(doneMessageChunk); + Assert.True(doneMessageChunk.Done); + Assert.Equal("stop", doneMessageChunk.DoneReason); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldHaveModelAndInnerContentAsync() + { + //Arrange + var expectedModel = "phi3"; + var sut = new OllamaChatCompletionService( + expectedModel, + httpClient: this._httpClient); + + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + + // Act + StreamingChatMessageContent? lastMessage = null; + await foreach (var message in sut.GetStreamingChatMessageContentsAsync(chat)) + { + lastMessage = message; + Assert.NotNull(message.InnerContent); + } + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Null(requestPayload.Options.Stop); + Assert.Null(requestPayload.Options.Temperature); + Assert.Null(requestPayload.Options.TopK); + Assert.Null(requestPayload.Options.TopP); + + Assert.NotNull(lastMessage!.ModelId); + Assert.Equal(expectedModel, lastMessage.ModelId); + + Assert.IsType(lastMessage.InnerContent); + var innerContent = lastMessage.InnerContent as ChatDoneResponseStream; + Assert.NotNull(innerContent); + Assert.True(innerContent.Done); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldHaveDoneReasonAsync() + { + //Arrange + var expectedModel = "phi3"; + var sut = new OllamaChatCompletionService( + expectedModel, + httpClient: this._httpClient); + + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + + // Act + StreamingChatMessageContent? 
lastMessage = null; + await foreach (var message in sut.GetStreamingChatMessageContentsAsync(chat)) + { + lastMessage = message; + } + + // Assert + Assert.NotNull(lastMessage); + Assert.IsType(lastMessage.InnerContent); + var innerContent = lastMessage.InnerContent as ChatDoneResponseStream; + Assert.NotNull(innerContent); + Assert.True(innerContent.Done); + Assert.Equal("stop", innerContent.DoneReason); + } + + [Fact] + public async Task GetStreamingChatMessageContentsExecutionSettingsMustBeSentAsync() + { + //Arrange + var sut = new OllamaChatCompletionService( + "fake-model", + httpClient: this._httpClient); + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + string jsonSettings = """ + { + "stop": ["stop me"], + "temperature": 0.5, + "top_p": 0.9, + "top_k": 100 + } + """; + + var executionSettings = JsonSerializer.Deserialize(jsonSettings); + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Act + await sut.GetStreamingChatMessageContentsAsync(chat, ollamaExecutionSettings).GetAsyncEnumerator().MoveNextAsync(); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop); + Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature); + Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP); + Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK); + } + + [Fact] + public async Task GetChatMessageContentsExecutionSettingsMustBeSentAsync() + { + //Arrange + var sut = new OllamaChatCompletionService( + "fake-model", + httpClient: this._httpClient); + var chat = new ChatHistory(); + chat.AddMessage(AuthorRole.User, "fake-text"); + string jsonSettings = """ + { + "stop": ["stop me"], + "temperature": 0.5, + "top_p": 0.9, + 
"top_k": 100 + } + """; + + var executionSettings = JsonSerializer.Deserialize(jsonSettings); + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Act + await sut.GetChatMessageContentsAsync(chat, ollamaExecutionSettings); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop); + Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature); + Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP); + Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs new file mode 100644 index 000000000000..ec1e63c1cd56 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs @@ -0,0 +1,67 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.IO; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Services; + +public sealed class OllamaTextEmbeddingGenerationTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + + public OllamaTextEmbeddingGenerationTests() + { + this._messageHandlerStub = new(); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/embeddings_test_response.json")); + this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") }; + } + + [Fact] + public async Task ShouldSendPromptToServiceAsync() + { + //Arrange + var sut = new OllamaTextEmbeddingGenerationService( + "fake-model", + httpClient: this._httpClient); + + //Act + await sut.GenerateEmbeddingsAsync(["fake-text"]); + + //Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.Equal("fake-text", requestPayload.Input[0]); + } + + [Fact] + public async Task ShouldHandleServiceResponseAsync() + { + //Arrange + var sut = new OllamaTextEmbeddingGenerationService( + "fake-model", + httpClient: this._httpClient); + + //Act + var contents = await sut.GenerateEmbeddingsAsync(["fake-text"]); + + //Assert + Assert.NotNull(contents); + Assert.Equal(2, contents.Count); + + var content = contents[0]; + Assert.Equal(5, content.Length); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs new file mode 100644 index 
000000000000..c765bf1d678d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs @@ -0,0 +1,200 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.TextGeneration; +using OllamaSharp.Models; +using OllamaSharp.Models.Chat; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Services; + +public sealed class OllamaTextGenerationTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + + public OllamaTextGenerationTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") }; + } + + [Fact] + public async Task ShouldSendPromptToServiceAsync() + { + //Arrange + var expectedModel = "phi3"; + var sut = new OllamaTextGenerationService( + expectedModel, + httpClient: this._httpClient); + + //Act + await sut.GetTextContentsAsync("fake-text"); + + //Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.Equal("fake-text", requestPayload.Prompt); + } + + [Fact] + public async Task ShouldHandleServiceResponseAsync() + { + //Arrange + var sut = new OllamaTextGenerationService( + "fake-model", + httpClient: this._httpClient); + + //Act + var contents = await sut.GetTextContentsAsync("fake-test"); + + //Assert + Assert.NotNull(contents); + + var content = contents.SingleOrDefault(); + Assert.NotNull(content); + 
Assert.Equal("This is test completion response", content.Text); + } + + [Fact] + public async Task GetTextContentsShouldHaveModelIdDefinedAsync() + { + //Arrange + var expectedModel = "phi3"; + var sut = new OllamaTextGenerationService( + expectedModel, + httpClient: this._httpClient); + + // Act + var textContent = await sut.GetTextContentAsync("Any prompt"); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Null(requestPayload.Options.Stop); + Assert.Null(requestPayload.Options.Temperature); + Assert.Null(requestPayload.Options.TopK); + Assert.Null(requestPayload.Options.TopP); + + Assert.NotNull(textContent.ModelId); + Assert.Equal(expectedModel, textContent.ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsShouldHaveModelIdDefinedAsync() + { + //Arrange + var expectedModel = "phi3"; + var sut = new OllamaTextGenerationService( + expectedModel, + httpClient: this._httpClient); + + // Act + StreamingTextContent? 
lastTextContent = null; + await foreach (var textContent in sut.GetStreamingTextContentsAsync("Any prompt")) + { + lastTextContent = textContent; + } + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Null(requestPayload.Options.Stop); + Assert.Null(requestPayload.Options.Temperature); + Assert.Null(requestPayload.Options.TopK); + Assert.Null(requestPayload.Options.TopP); + + Assert.NotNull(lastTextContent!.ModelId); + Assert.Equal(expectedModel, lastTextContent.ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsExecutionSettingsMustBeSentAsync() + { + //Arrange + var sut = new OllamaTextGenerationService( + "fake-model", + httpClient: this._httpClient); + + string jsonSettings = """ + { + "stop": ["stop me"], + "temperature": 0.5, + "top_p": 0.9, + "top_k": 100 + } + """; + + var executionSettings = JsonSerializer.Deserialize(jsonSettings); + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Act + await sut.GetStreamingTextContentsAsync("Any prompt", ollamaExecutionSettings).GetAsyncEnumerator().MoveNextAsync(); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop); + Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature); + Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP); + Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK); + } + + [Fact] + public async Task GetTextContentsExecutionSettingsMustBeSentAsync() + { + //Arrange + var sut = new OllamaTextGenerationService( + "fake-model", + httpClient: this._httpClient); + string jsonSettings = """ + { + "stop": ["stop me"], + 
"temperature": 0.5, + "top_p": 0.9, + "top_k": 100 + } + """; + + var executionSettings = JsonSerializer.Deserialize(jsonSettings); + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Act + await sut.GetTextContentsAsync("Any prompt", ollamaExecutionSettings); + + // Assert + var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + Assert.NotNull(requestPayload); + Assert.NotNull(requestPayload.Options); + Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop); + Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature); + Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP); + Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK); + } + + /// + /// Disposes resources used by this class. + /// + public void Dispose() + { + this._messageHandlerStub.Dispose(); + + this._httpClient.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs new file mode 100644 index 000000000000..b7ff3d1c57c5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs @@ -0,0 +1,65 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Text.Json; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Xunit; + +namespace SemanticKernel.Connectors.Ollama.UnitTests.Settings; + +/// +/// Unit tests of . 
+/// +public class OllamaPromptExecutionSettingsTests +{ + [Fact] + public void FromExecutionSettingsWhenAlreadyOllamaShouldReturnSame() + { + // Arrange + var executionSettings = new OllamaPromptExecutionSettings(); + + // Act + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Assert + Assert.Same(executionSettings, ollamaExecutionSettings); + } + + [Fact] + public void FromExecutionSettingsWhenNullShouldReturnDefault() + { + // Arrange + OllamaPromptExecutionSettings? executionSettings = null; + + // Act + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + // Assert + Assert.Null(ollamaExecutionSettings.Stop); + Assert.Null(ollamaExecutionSettings.Temperature); + Assert.Null(ollamaExecutionSettings.TopP); + Assert.Null(ollamaExecutionSettings.TopK); + } + + [Fact] + public void FromExecutionSettingsWhenSerializedHasPropertiesShouldPopulateSpecialized() + { + string jsonSettings = """ + { + "stop": ["stop me"], + "temperature": 0.5, + "top_p": 0.9, + "top_k": 100 + } + """; + + var executionSettings = JsonSerializer.Deserialize(jsonSettings); + var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + + Assert.Equal("stop me", ollamaExecutionSettings.Stop?.FirstOrDefault()); + Assert.Equal(0.5f, ollamaExecutionSettings.Temperature); + Assert.Equal(0.9f, ollamaExecutionSettings.TopP!.Value, 0.1f); + Assert.Equal(100, ollamaExecutionSettings.TopK); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt new file mode 100644 index 000000000000..55b26d234500 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt @@ -0,0 +1,6 @@ 
+{"model":"phi3","created_at":"2024-07-02T11:45:16.216898458Z","message":{"role":"assistant","content":"This "},"done":false} +{"model":"phi3","created_at":"2024-07-02T11:45:16.22693076Z","message":{"role":"assistant","content":"is "},"done":false} +{"model":"phi3","created_at":"2024-07-02T11:45:16.236570847Z","message":{"role":"assistant","content":"test "},"done":false} +{"model":"phi3","created_at":"2024-07-02T11:45:16.246538945Z","message":{"role":"assistant","content":"completion "},"done":false} +{"model":"phi3","created_at":"2024-07-02T11:45:16.25611096Z","message":{"role":"assistant","content":"response"},"done":false} +{"model":"phi3","created_at":"2024-07-02T11:45:16.265598822Z","message":{"role":"assistant","content":""},"done_reason":"stop","done":true,"total_duration":58123571935,"load_duration":55561676662,"prompt_eval_count":10,"prompt_eval_duration":34847000,"eval_count":239,"eval_duration":2381751000} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/embeddings_test_response.json b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/embeddings_test_response.json new file mode 100644 index 000000000000..3316addba6dd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/embeddings_test_response.json @@ -0,0 +1,19 @@ +{ + "model": "fake-model", + "embeddings": [ + [ + 0.020765934, + 0.007495159, + 0.01268963, + 0.013938076, + -0.04621073 + ], + [ + 0.025005031, + 0.009804744, + -0.016960088, + -0.024823941, + -0.02756831 + ] + ] +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt new file mode 100644 index 000000000000..d2fe45f536c9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt @@ -0,0 +1,6 @@ 
+{"model":"phi3","created_at":"2024-07-02T12:22:37.03627019Z","response":"This ","done":false} +{"model":"phi3","created_at":"2024-07-02T12:22:37.048915655Z","response":"is ","done":false} +{"model":"phi3","created_at":"2024-07-02T12:22:37.060968719Z","response":"test ","done":false} +{"model":"phi3","created_at":"2024-07-02T12:22:37.072390403Z","response":"completion ","done":false} +{"model":"phi3","created_at":"2024-07-02T12:22:37.072390403Z","response":"response","done":false} +{"model":"phi3","created_at":"2024-07-02T12:22:37.091017292Z","response":"","done":true,"done_reason":"stop","context":[32010,3750,338,278,14744,7254,29973,32007,32001,450,2769,278,14744,5692,7254,304,502,373,11563,756,304,437,411,278,14801,292,310,6575,4366,491,278,25005,29889,8991,4366,29892,470,4796,3578,29892,338,1754,701,310,263,18272,310,11955,393,508,367,3595,297,263,17251,17729,313,1127,29892,24841,29892,13328,29892,7933,29892,7254,29892,1399,5973,29892,322,28008,1026,467,910,18272,310,11955,338,2998,408,4796,3578,1363,372,3743,599,278,1422,281,6447,1477,29879,12420,4208,29889,13,13,10401,6575,4366,24395,11563,29915,29879,25005,29892,21577,13206,21337,763,21767,307,1885,322,288,28596,14801,20511,29899,29893,6447,1477,3578,313,9539,322,28008,1026,29897,901,1135,5520,29899,29893,6447,1477,3578,313,1127,322,13328,467,4001,1749,5076,526,901,20502,304,7254,3578,322,278,8991,5692,901,4796,515,1749,18520,373,11563,2861,304,445,14801,292,2779,29892,591,17189,573,278,14744,408,7254,29889,13,13,2528,17658,29892,5998,1716,7254,322,28008,1026,281,6447,1477,29879,310,3578,526,29574,22829,491,4799,13206,21337,29892,1749,639,1441,338,451,28482,491,278,28008,1026,2927,1951,5199,5076,526,3109,20502,304,372,29889,12808,29892,6575,4366,20888,11563,29915,29879,7101,756,263,6133,26171,297,278,13328,29899,12692,760,310,278,18272,9401,304,2654,470,28008,1026,11955,2861,304,9596,280,1141,14801,292,29892,607,4340,26371,2925,1749,639,1441,310,278,7254,14744,29889,13,13,797,15837,29892,278,14801,292,310,205
11,281,6447,1477,3578,313,9539,322,28008,1026,29897,491,11563,29915,29879,25005,9946,502,304,1074,263,758,24130,10835,7254,14744,2645,2462,4366,6199,29889,32007],"total_duration":64697743903,"load_duration":61368714283,"prompt_eval_count":10,"prompt_eval_duration":40919000,"eval_count":304,"eval_duration":3237325000} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Ollama/AssemblyInfo.cs b/dotnet/src/Connectors/Connectors.Ollama/AssemblyInfo.cs new file mode 100644 index 000000000000..fe66371dbc58 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/AssemblyInfo.cs @@ -0,0 +1,6 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0070")] diff --git a/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj b/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj new file mode 100644 index 000000000000..1ce5397d2e07 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj @@ -0,0 +1,34 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Connectors.Ollama + $(AssemblyName) + net8;netstandard2.0 + alpha + + + + + + + + + Semantic Kernel - Ollama AI connectors + Semantic Kernel connector for Ollama. Contains services for text generation, chat completion and text embeddings. + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs b/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs new file mode 100644 index 000000000000..f9ed8fb7b4ff --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs @@ -0,0 +1,62 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net.Http; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using OllamaSharp; + +namespace Microsoft.SemanticKernel.Connectors.Ollama.Core; + +/// +/// Represents the core of a service. +/// +public abstract class ServiceBase +{ + /// + /// Attributes of the service. + /// + internal Dictionary AttributesInternal { get; } = []; + + /// + /// Internal Ollama Sharp client. + /// + internal readonly OllamaApiClient _client; + + internal ServiceBase(string model, + Uri? endpoint, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(model); + this.AttributesInternal.Add(AIServiceExtensions.ModelIdKey, model); + + if (httpClient is not null) + { + this._client = new(httpClient, model); + } + else + { +#pragma warning disable CA2000 // Dispose objects before losing scope + // Client needs to be created to be able to inject Semantic Kernel headers + var internalClient = HttpClientProvider.GetHttpClient(); + internalClient.BaseAddress = endpoint; + internalClient.DefaultRequestHeaders.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); + internalClient.DefaultRequestHeaders.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(Kernel))); + + this._client = new(internalClient, model); +#pragma warning restore CA2000 // Dispose objects before losing scope + } + } + + internal ServiceBase(string model, + OllamaApiClient ollamaClient, + ILoggerFactory? 
loggerFactory = null) + { + Verify.NotNullOrWhiteSpace(model); + this._client = ollamaClient; + this.AttributesInternal.Add(AIServiceExtensions.ModelIdKey, model); + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs new file mode 100644 index 000000000000..0ad8d895bdd7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs @@ -0,0 +1,231 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using OllamaSharp; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods for adding Ollama Text Generation service to the kernel builder. +/// +public static class OllamaKernelBuilderExtensions +{ + #region Text Generation + + /// + /// Add Ollama Text Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The endpoint to Ollama hosted service. + /// The optional service ID. + /// The optional custom HttpClient. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextGeneration( + this IKernelBuilder builder, + string modelId, + Uri endpoint, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + endpoint: endpoint, + loggerFactory: serviceProvider.GetService())); + return builder; + } + + /// + /// Add Ollama Text Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The optional service ID. 
+ /// The optional custom HttpClient. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextGeneration( + this IKernelBuilder builder, + string modelId, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService())); + return builder; + } + + /// + /// Add Ollama Text Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextGeneration( + this IKernelBuilder builder, + string modelId, + OllamaApiClient ollamaClient, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + ollamaClient: ollamaClient, + loggerFactory: serviceProvider.GetService())); + return builder; + } + + #endregion + + #region Chat Completion + + /// + /// Add Ollama Chat Completion service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The endpoint to Ollama hosted service. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaChatCompletion( + this IKernelBuilder builder, + string modelId, + Uri endpoint, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaChatCompletion(modelId, endpoint, serviceId); + + return builder; + } + + /// + /// Add Ollama Chat Completion service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The optional custom HttpClient. 
+ /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaChatCompletion( + this IKernelBuilder builder, + string modelId, + HttpClient? httpClient = null, + string? serviceId = null + ) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaChatCompletion(modelId, httpClient, serviceId); + + return builder; + } + + /// + /// Add Ollama Chat Completion service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaChatCompletion( + this IKernelBuilder builder, + string modelId, + OllamaApiClient ollamaClient, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaChatCompletion(modelId, ollamaClient, serviceId); + + return builder; + } + + #endregion + + #region Text Embeddings + + /// + /// Add Ollama Text Embeddings Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The endpoint to Ollama hosted service. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + Uri endpoint, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaTextEmbeddingGeneration(modelId, endpoint, serviceId); + + return builder; + } + + /// + /// Add Ollama Text Embeddings Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The optional custom HttpClient. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + HttpClient? httpClient = null, + string? 
serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaTextEmbeddingGeneration(modelId, httpClient, serviceId); + + return builder; + } + + /// + /// Add Ollama Text Embeddings Generation service to the kernel builder. + /// + /// The kernel builder. + /// The model for text generation. + /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IKernelBuilder AddOllamaTextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + OllamaApiClient ollamaClient, + string? serviceId = null) + { + Verify.NotNull(builder); + + builder.Services.AddOllamaTextEmbeddingGeneration(modelId, ollamaClient, serviceId); + + return builder; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs new file mode 100644 index 000000000000..9ef438515e35 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs @@ -0,0 +1,243 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using OllamaSharp; + +namespace Microsoft.SemanticKernel; + +/// +/// Extension methods for adding Ollama Text Generation service to the kernel builder. +/// +public static class OllamaServiceCollectionExtensions +{ + #region Text Generation + + /// + /// Add Ollama Text Generation service to the specified service collection. + /// + /// The target service collection. + /// The model for text generation. + /// The endpoint to Ollama hosted service. 
+ /// The optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextGeneration( + this IServiceCollection services, + string modelId, + Uri endpoint, + string? serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + endpoint: endpoint, + loggerFactory: serviceProvider.GetService())); + } + + /// + /// Add Ollama Text Generation service to the specified service collection. + /// + /// The target service collection. + /// The model for text generation. + /// Optional custom HttpClient, picked from ServiceCollection if not provided. + /// The optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextGeneration( + this IServiceCollection services, + string modelId, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + httpClient: HttpClientProvider.GetHttpClient(serviceProvider), + loggerFactory: serviceProvider.GetService())); + } + + /// + /// Add Ollama Text Generation service to the kernel builder. + /// + /// The target service collection. + /// The model for text generation. + /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextGeneration( + this IServiceCollection services, + string modelId, + OllamaApiClient ollamaClient, + string? 
serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextGenerationService( + modelId: modelId, + ollamaClient: ollamaClient, + loggerFactory: serviceProvider.GetService())); + } + + #endregion + + #region Chat Completion + + /// + /// Add Ollama Chat Completion and Text Generation services to the specified service collection. + /// + /// The target service collection. + /// The model for text generation. + /// The endpoint to Ollama hosted service. + /// Optional service ID. + /// The updated service collection. + public static IServiceCollection AddOllamaChatCompletion( + this IServiceCollection services, + string modelId, + Uri endpoint, + string? serviceId = null) + { + Verify.NotNull(services); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaChatCompletionService( + modelId: modelId, + endpoint: endpoint, + loggerFactory: serviceProvider.GetService())); + + return services; + } + + /// + /// Add Ollama Chat Completion and Text Generation services to the specified service collection. + /// + /// The target service collection. + /// The model for text generation. + /// Optional custom HttpClient, picked from ServiceCollection if not provided. + /// Optional service ID. + /// The updated service collection. + public static IServiceCollection AddOllamaChatCompletion( + this IServiceCollection services, + string modelId, + HttpClient? httpClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaChatCompletionService( + modelId: modelId, + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService())); + + return services; + } + + /// + /// Add Ollama Chat Completion service to the kernel builder. + /// + /// The target service collection. + /// The model for text generation. 
+ /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaChatCompletion( + this IServiceCollection services, + string modelId, + OllamaApiClient ollamaClient, + string? serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaChatCompletionService( + modelId: modelId, + ollamaClient: ollamaClient, + loggerFactory: serviceProvider.GetService())); + } + + #endregion + + #region Text Embeddings + + /// + /// Add Ollama Text Embedding Generation services to the kernel builder. + /// + /// The target service collection. + /// The model for text generation. + /// The endpoint to Ollama hosted service. + /// Optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + Uri endpoint, + string? serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextEmbeddingGenerationService( + modelId: modelId, + endpoint: endpoint, + loggerFactory: serviceProvider.GetService())); + } + + /// + /// Add Ollama Text Embedding Generation services to the kernel builder. + /// + /// The target service collection. + /// The model for text generation. + /// Optional custom HttpClient, picked from ServiceCollection if not provided. + /// Optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + HttpClient? httpClient = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextEmbeddingGenerationService( + modelId: modelId, + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService())); + } + + /// + /// Add Ollama Text Embeddings Generation service to the kernel builder. + /// + /// The target service collection. + /// The model for text generation. + /// The Ollama Sharp library client. + /// The optional service ID. + /// The updated kernel builder. + public static IServiceCollection AddOllamaTextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + OllamaApiClient ollamaClient, + string? serviceId = null) + { + Verify.NotNull(services); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OllamaTextEmbeddingGenerationService( + modelId: modelId, + ollamaClient: ollamaClient, + loggerFactory: serviceProvider.GetService())); + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs new file mode 100644 index 000000000000..e8e0c2e965e9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs @@ -0,0 +1,182 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.Ollama.Core; +using OllamaSharp; +using OllamaSharp.Models.Chat; + +namespace Microsoft.SemanticKernel.Connectors.Ollama; + +/// +/// Represents a chat completion service using Ollama Original API. 
+/// +public sealed class OllamaChatCompletionService : ServiceBase, IChatCompletionService +{ + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// The endpoint including the port where Ollama server is hosted + /// Optional logger factory to be used for logging. + public OllamaChatCompletionService( + string modelId, + Uri endpoint, + ILoggerFactory? loggerFactory = null) + : base(modelId, endpoint, null, loggerFactory) + { + Verify.NotNull(endpoint); + } + + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// HTTP client to be used for communication with the Ollama API. + /// Optional logger factory to be used for logging. + public OllamaChatCompletionService( + string modelId, + HttpClient httpClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, null, httpClient, loggerFactory) + { + Verify.NotNull(httpClient); + Verify.NotNull(httpClient.BaseAddress); + } + + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// The Ollama API client. + /// Optional logger factory to be used for logging. + public OllamaChatCompletionService( + string modelId, + OllamaApiClient ollamaClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, ollamaClient, loggerFactory) + { + } + + /// + public IReadOnlyDictionary Attributes => this.AttributesInternal; + + /// + public async Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel); + var chatMessageContent = new ChatMessageContent(); + var fullContent = new StringBuilder(); + string? modelId = null; + AuthorRole? 
authorRole = null; + List innerContent = []; + + await foreach (var responseStreamChunk in this._client.Chat(request, cancellationToken).ConfigureAwait(false)) + { + if (responseStreamChunk is null) + { + continue; + } + + innerContent.Add(responseStreamChunk); + + if (responseStreamChunk.Message.Content is not null) + { + fullContent.Append(responseStreamChunk.Message.Content); + } + + if (responseStreamChunk.Message.Role is not null) + { + authorRole = GetAuthorRole(responseStreamChunk.Message.Role)!.Value; + } + + modelId ??= responseStreamChunk.Model; + } + + return [new ChatMessageContent( + role: authorRole ?? new(), + content: fullContent.ToString(), + modelId: modelId, + innerContent: innerContent)]; + } + + /// + public async IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel); + + await foreach (var message in this._client.Chat(request, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingChatMessageContent( + role: GetAuthorRole(message!.Message.Role), + content: message.Message.Content, + modelId: message.Model, + innerContent: message); + } + } + + #region Private + + private static AuthorRole? GetAuthorRole(ChatRole? role) => role?.ToString().ToUpperInvariant() switch + { + "USER" => AuthorRole.User, + "ASSISTANT" => AuthorRole.Assistant, + "SYSTEM" => AuthorRole.System, + null => null, + _ => new AuthorRole(role.ToString()!) 
+ }; + + private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel) + { + var messages = new List(); + foreach (var chatHistoryMessage in chatHistory) + { + ChatRole role = ChatRole.User; + if (chatHistoryMessage.Role == AuthorRole.System) + { + role = ChatRole.System; + } + else if (chatHistoryMessage.Role == AuthorRole.Assistant) + { + role = ChatRole.Assistant; + } + + messages.Add(new Message(role, chatHistoryMessage.Content!)); + } + + var request = new ChatRequest + { + Options = new() + { + Temperature = settings.Temperature, + TopP = settings.TopP, + TopK = settings.TopK, + Stop = settings.Stop?.ToArray() + }, + Messages = messages, + Model = selectedModel, + Stream = true + }; + + return request; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs new file mode 100644 index 000000000000..f5bee67d4ec5 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs @@ -0,0 +1,93 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.Ollama.Core; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; +using OllamaSharp; +using OllamaSharp.Models; + +namespace Microsoft.SemanticKernel.Connectors.Ollama; + +/// +/// Represents a embedding generation service using Ollama Original API. +/// +public sealed class OllamaTextEmbeddingGenerationService : ServiceBase, ITextEmbeddingGenerationService +{ + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. 
+ /// The endpoint including the port where Ollama server is hosted + /// Optional logger factory to be used for logging. + public OllamaTextEmbeddingGenerationService( + string modelId, + Uri endpoint, + ILoggerFactory? loggerFactory = null) + : base(modelId, endpoint, null, loggerFactory) + { + Verify.NotNull(endpoint); + } + + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// HTTP client to be used for communication with the Ollama API. + /// Optional logger factory to be used for logging. + public OllamaTextEmbeddingGenerationService( + string modelId, + HttpClient httpClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, null, httpClient, loggerFactory) + { + Verify.NotNull(httpClient); + Verify.NotNull(httpClient.BaseAddress); + } + + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// The Ollama API client. + /// Optional logger factory to be used for logging. + public OllamaTextEmbeddingGenerationService( + string modelId, + OllamaApiClient ollamaClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, ollamaClient, loggerFactory) + { + } + + /// + public IReadOnlyDictionary Attributes => this.AttributesInternal; + + /// + public async Task>> GenerateEmbeddingsAsync( + IList data, + Kernel? 
kernel = null, + CancellationToken cancellationToken = default) + { + var request = new EmbedRequest + { + Model = this.GetModelId()!, + Input = data.ToList(), + }; + + var response = await this._client.Embed(request, cancellationToken: cancellationToken).ConfigureAwait(false); + + List> embeddings = []; + foreach (var embedding in response.Embeddings) + { + embeddings.Add(embedding.Select(@decimal => (float)@decimal).ToArray()); + } + + return embeddings; + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs new file mode 100644 index 000000000000..a9432c15d839 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs @@ -0,0 +1,142 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.Ollama.Core; +using Microsoft.SemanticKernel.TextGeneration; +using OllamaSharp; +using OllamaSharp.Models; + +namespace Microsoft.SemanticKernel.Connectors.Ollama; + +/// +/// Represents a text generation service using Ollama Original API. +/// +public sealed class OllamaTextGenerationService : ServiceBase, ITextGenerationService +{ + /// + /// Initializes a new instance of the class. + /// + /// The Ollama model for the text generation service. + /// The endpoint including the port where Ollama server is hosted + /// Optional logger factory to be used for logging. + public OllamaTextGenerationService( + string modelId, + Uri endpoint, + ILoggerFactory? loggerFactory = null) + : base(modelId, endpoint, null, loggerFactory) + { + Verify.NotNull(endpoint); + } + + /// + /// Initializes a new instance of the class. 
+ /// + /// The Ollama model for the text generation service. + /// HTTP client to be used for communication with the Ollama API. + /// Optional logger factory to be used for logging. + public OllamaTextGenerationService( + string modelId, + HttpClient httpClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, null, httpClient, loggerFactory) + { + Verify.NotNull(httpClient); + Verify.NotNull(httpClient.BaseAddress); + } + + /// + /// Initializes a new instance of the class. + /// + /// The hosted model. + /// The Ollama API client. + /// Optional logger factory to be used for logging. + public OllamaTextGenerationService( + string modelId, + OllamaApiClient ollamaClient, + ILoggerFactory? loggerFactory = null) + : base(modelId, ollamaClient, loggerFactory) + { + } + + /// + public IReadOnlyDictionary Attributes => this.AttributesInternal; + + /// + public async Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + var fullContent = new StringBuilder(); + List innerContent = []; + string? modelId = null; + + var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + var request = CreateRequest(settings, this._client.SelectedModel); + request.Prompt = prompt; + + await foreach (var responseStreamChunk in this._client.Generate(request, cancellationToken).ConfigureAwait(false)) + { + if (responseStreamChunk is null) + { + continue; + } + + innerContent.Add(responseStreamChunk); + fullContent.Append(responseStreamChunk.Response); + + modelId ??= responseStreamChunk.Model; + } + + return [new TextContent( + text: fullContent.ToString(), + modelId: modelId, + innerContent: innerContent)]; + } + + /// + public async IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? 
kernel = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings); + var request = CreateRequest(settings, this._client.SelectedModel); + request.Prompt = prompt; + + await foreach (var content in this._client.Generate(request, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingTextContent( + text: content?.Response, + modelId: content?.Model, + innerContent: content); + } + } + + private static GenerateRequest CreateRequest(OllamaPromptExecutionSettings settings, string selectedModel) + { + var request = new GenerateRequest + { + Options = new() + { + Temperature = settings.Temperature, + TopP = settings.TopP, + TopK = settings.TopK, + Stop = settings.Stop?.ToArray() + }, + Model = selectedModel, + Stream = true + }; + + return request; + } +} diff --git a/dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs new file mode 100644 index 000000000000..30032bb981d4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs @@ -0,0 +1,122 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Text; + +namespace Microsoft.SemanticKernel.Connectors.Ollama; + +/// +/// Ollama Prompt Execution Settings. +/// +public sealed class OllamaPromptExecutionSettings : PromptExecutionSettings +{ + /// + /// Gets the specialization for the Ollama execution settings. + /// + /// Generic prompt execution settings. + /// Specialized Ollama execution settings. + public static OllamaPromptExecutionSettings FromExecutionSettings(PromptExecutionSettings? 
executionSettings) + { + switch (executionSettings) + { + case null: + return new(); + case OllamaPromptExecutionSettings settings: + return settings; + } + + var json = JsonSerializer.Serialize(executionSettings); + var ollamaExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); + if (ollamaExecutionSettings is not null) + { + return ollamaExecutionSettings; + } + + throw new ArgumentException( + $"Invalid execution settings, cannot convert to {nameof(OllamaPromptExecutionSettings)}", + nameof(executionSettings)); + } + + /// + /// Sets the stop sequences to use. When this pattern is encountered the + /// LLM will stop generating text and return. Multiple stop patterns may + /// be set by specifying multiple separate stop parameters in a model file. + /// + [JsonPropertyName("stop")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public List? Stop + { + get => this._stop; + + set + { + this.ThrowIfFrozen(); + this._stop = value; + } + } + + /// + /// Reduces the probability of generating nonsense. A higher value + /// (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) + /// will be more conservative. (Default: 40) + /// + [JsonPropertyName("top_k")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? TopK + { + get => this._topK; + + set + { + this.ThrowIfFrozen(); + this._topK = value; + } + } + + /// + /// Works together with top-k. A higher value (e.g., 0.95) will lead to + /// more diverse text, while a lower value (e.g., 0.5) will generate more + /// focused and conservative text. (Default: 0.9) + /// + [JsonPropertyName("top_p")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? TopP + { + get => this._topP; + + set + { + this.ThrowIfFrozen(); + this._topP = value; + } + } + + /// + /// The temperature of the model. Increasing the temperature will make the + /// model answer more creatively. 
(Default: 0.8) + /// + [JsonPropertyName("temperature")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Temperature + { + get => this._temperature; + + set + { + this.ThrowIfFrozen(); + this._temperature = value; + } + } + + #region private + + private List? _stop; + private float? _temperature; + private float? _topP; + private int? _topK; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs index b9a3af87fec1..72597d18d3a3 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs +++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIChatCompletionService.cs @@ -93,6 +93,7 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory using var generator = new Generator(this.GetModel(), generatorParams); + bool removeNextTokenStartingWithSpace = true; while (!generator.IsDone()) { cancellationToken.ThrowIfCancellationRequested(); @@ -104,7 +105,14 @@ private async IAsyncEnumerable RunInferenceAsync(ChatHistory chatHistory var outputTokens = generator.GetSequence(0); var newToken = outputTokens.Slice(outputTokens.Length - 1, 1); - var output = this.GetTokenizer().Decode(newToken); + string output = this.GetTokenizer().Decode(newToken); + + if (removeNextTokenStartingWithSpace && output[0] == ' ') + { + removeNextTokenStartingWithSpace = false; + output = output.TrimStart(); + } + return output; }, cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/Connectors/Connectors.Oobabooga/README.md b/dotnet/src/Connectors/Connectors.Oobabooga/README.md deleted file mode 100644 index b1c45cd528cd..000000000000 --- a/dotnet/src/Connectors/Connectors.Oobabooga/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Semantic Kernel Oobabooga AI Connector - -This connector have moved, please go [here](https://github.com/MyIntelligenceAgency/semantic-fleet) 
for more information. diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj new file mode 100644 index 000000000000..fbf387f76a33 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj @@ -0,0 +1,97 @@ +๏ปฟ + + + SemanticKernel.Connectors.OpenAI.UnitTests + $(AssemblyName) + net8.0 + true + enable + false + $(NoWarn);SKEXP0001;SKEXP0070;SKEXP0010;CS1591;IDE1006;RCS1261;CA1031;CA1308;CA1861;CA2007;CA2234;VSTHRD111;CA1812 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + Always + + + + diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs similarity index 77% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs index 9a5103f83e6e..5df2fb54cdb5 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs @@ -2,10 +2,10 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Net; using System.Net.Http; -using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; using 
Microsoft.SemanticKernel; @@ -13,7 +13,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; public sealed class AutoFunctionInvocationFilterTests : IDisposable { @@ -126,6 +126,7 @@ public async Task FiltersAreExecutedCorrectlyOnStreamingAsync() public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() { // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); var executionOrder = new List(); var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); @@ -149,10 +150,10 @@ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() builder.Plugins.Add(plugin); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); @@ -182,6 +183,7 @@ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync() public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming) { // Arrange + var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); var executionOrder = new List(); var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); @@ -214,10 +216,10 @@ public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming) builder.Plugins.Add(plugin); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); 
builder.Services.AddSingleton(filter1); builder.Services.AddSingleton(filter2); @@ -307,10 +309,12 @@ public async Task FilterCanHandleExceptionAsync() this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses(); - var chatCompletion = new OpenAIChatCompletionService(modelId: "test-model-id", apiKey: "test-api-key", httpClient: this._httpClient); + var chatCompletion = new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("System message"); // Act var result = await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); @@ -345,7 +349,8 @@ public async Task FilterCanHandleExceptionOnStreamingAsync() this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses(); - var chatCompletion = new OpenAIChatCompletionService(modelId: "test-model-id", apiKey: "test-api-key", httpClient: this._httpClient); + var chatCompletion = new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + var chatHistory = new ChatHistory(); var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; @@ -385,8 +390,8 @@ public async Task FiltersCanSkipFunctionExecutionAsync() filterInvocations++; }); - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StringContent(File.ReadAllText("TestData/filters_multiple_function_calls_test_response.json")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; this._messageHandlerStub.ResponsesToReturn = [response1, response2]; @@ -568,131 +573,6 @@ public async Task PostFilterCanTerminateOperationOnStreamingAsync() Assert.Equal(AuthorRole.Tool, lastMessageContent.Role); } - [Fact] - public async Task FilterContextHasCancellationTokenAsync() - { - // Arrange - using var cancellationTokenSource = new CancellationTokenSource(); - int firstFunctionInvocations = 0; - int secondFunctionInvocations = 0; - - var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => - { - cancellationTokenSource.Cancel(); - firstFunctionInvocations++; - return parameter; - }, "Function1"); - - var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => - { - secondFunctionInvocations++; - return parameter; - }, "Function2"); - - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); - - var kernel = this.GetKernelWithFilter(plugin, async (context, next) => - { - Assert.Equal(cancellationTokenSource.Token, context.CancellationToken); - - await next(context); - - context.CancellationToken.ThrowIfCancellationRequested(); - }); - - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - var arguments = new KernelArguments(new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }); - - // Act & Assert - var 
exception = await Assert.ThrowsAsync(() - => kernel.InvokePromptAsync("Test prompt", arguments, cancellationToken: cancellationTokenSource.Token)); - - Assert.Equal(1, firstFunctionInvocations); - Assert.Equal(0, secondFunctionInvocations); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public async Task FilterContextHasOperationRelatedInformationAsync(bool isStreaming) - { - // Arrange - List actualToolCallIds = []; - List actualChatMessageContents = []; - - var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); - - var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); - var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); - - var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); - - var filter = new AutoFunctionInvocationFilter(async (context, next) => - { - actualToolCallIds.Add(context.ToolCallId); - actualChatMessageContents.Add(context.ChatMessageContent); - - await next(context); - }); - - var builder = Kernel.CreateBuilder(); - - builder.Plugins.Add(plugin); - - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); - - builder.Services.AddSingleton(filter); - - var kernel = builder.Build(); - - var arguments = new KernelArguments(new OpenAIPromptExecutionSettings - { - ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions - }); - - // Act - if (isStreaming) - { - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - await foreach 
(var item in kernel.InvokePromptStreamingAsync("Test prompt", arguments)) - { } - } - else - { - using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }; - using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) }; - - this._messageHandlerStub.ResponsesToReturn = [response1, response2]; - - await kernel.InvokePromptAsync("Test prompt", arguments); - } - - // Assert - Assert.Equal(["tool-call-id-1", "tool-call-id-2"], actualToolCallIds); - - foreach (var chatMessageContent in actualChatMessageContents) - { - var content = chatMessageContent as OpenAIChatMessageContent; - - Assert.NotNull(content); - - Assert.Equal("test-model-id", content.ModelId); - Assert.Equal(AuthorRole.Assistant, content.Role); - Assert.Equal(2, content.ToolCalls.Count); - } - } - public void Dispose() { this._httpClient.Dispose(); @@ -705,18 +585,18 @@ public void Dispose() private static List GetFunctionCallingResponses() { return [ - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) } + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_test_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_multiple_function_calls_test_response.json")) }, + new HttpResponseMessage(HttpStatusCode.OK) { 
Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) } ]; } private static List GetFunctionCallingStreamingResponses() { return [ - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_streaming_multiple_function_calls_test_response.txt")) }, - new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) } + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_test_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_streaming_multiple_function_calls_test_response.txt")) }, + new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) } ]; } #pragma warning restore CA2000 @@ -731,10 +611,10 @@ private Kernel GetKernelWithFilter( builder.Plugins.Add(plugin); builder.Services.AddSingleton(filter); - builder.AddOpenAIChatCompletion( - modelId: "test-model-id", - apiKey: "test-api-key", - httpClient: this._httpClient); + builder.Services.AddSingleton((serviceProvider) => + { + return new OpenAIChatCompletionService("model-id", "test-api-key", "organization-id", this._httpClient); + }); return builder.Build(); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs new file mode 100644 index 000000000000..8597fb4b9dd9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs @@ -0,0 +1,243 @@ +๏ปฟ// Copyright (c) Microsoft. 
All rights reserved. + +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using Moq; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; +public partial class ClientCoreTests +{ + [Fact] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected() + { + // Act + var logger = new Mock>().Object; + var openAIClient = new OpenAIClient(new ApiKeyCredential("key")); + + var clientCoreModelConstructor = new ClientCore("model1", "apiKey"); + var clientCoreOpenAIClientConstructor = new ClientCore("model1", openAIClient, logger: logger); + + // Assert + Assert.NotNull(clientCoreModelConstructor); + Assert.NotNull(clientCoreOpenAIClientConstructor); + + Assert.Equal("model1", clientCoreModelConstructor.ModelId); + Assert.Equal("model1", clientCoreOpenAIClientConstructor.ModelId); + + Assert.NotNull(clientCoreModelConstructor.Client); + Assert.NotNull(clientCoreOpenAIClientConstructor.Client); + Assert.Equal(openAIClient, clientCoreOpenAIClientConstructor.Client); + Assert.Equal(NullLogger.Instance, clientCoreModelConstructor.Logger); + Assert.Equal(logger, clientCoreOpenAIClientConstructor.Logger); + } + + [Theory] + [InlineData(null, null)] + [InlineData("http://localhost", null)] + [InlineData(null, "http://localhost")] + [InlineData("http://localhost-1", "http://localhost-2")] + public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? providedEndpoint) + { + // Arrange + Uri? endpoint = null; + HttpClient? 
client = null; + if (providedEndpoint is not null) + { + endpoint = new Uri(providedEndpoint); + } + + if (clientBaseAddress is not null) + { + client = new HttpClient { BaseAddress = new Uri(clientBaseAddress) }; + } + + // Act + var clientCore = new ClientCore("model", "apiKey", endpoint: endpoint, httpClient: client); + + // Assert + Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/v1"), clientCore.Endpoint); + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.EndpointKey)); + Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString() ?? "https://api.openai.com/v1", clientCore.Attributes[AIServiceExtensions.EndpointKey]); + + client?.Dispose(); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task ItAddOrganizationHeaderWhenProvidedAsync(bool organizationIdProvided) + { + using HttpMessageHandlerStub handler = new(); + using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore( + modelId: "model", + apiKey: "test", + organizationId: (organizationIdProvided) ? 
"organization" : null, + httpClient: client); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + if (organizationIdProvided) + { + Assert.True(handler.RequestHeaders!.Contains("OpenAI-Organization")); + Assert.Equal("organization", handler.RequestHeaders.GetValues("OpenAI-Organization").FirstOrDefault()); + } + else + { + Assert.False(handler.RequestHeaders!.Contains("OpenAI-Organization")); + } + } + + [Fact] + public async Task ItAddSemanticKernelHeadersOnEachRequestAsync() + { + using HttpMessageHandlerStub handler = new(); + using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore(modelId: "model", apiKey: "test", httpClient: client); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + Assert.True(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.Equal(HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore)), handler.RequestHeaders.GetValues(HttpHeaderConstant.Names.SemanticKernelVersion).FirstOrDefault()); + + Assert.True(handler.RequestHeaders.Contains("User-Agent")); + Assert.Contains(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Fact] + public async Task ItDoesNotAddSemanticKernelHeadersWhenOpenAIClientIsProvidedAsync() + { + using HttpMessageHandlerStub handler = new(); 
+ using HttpClient client = new(handler); + handler.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK); + + // Act + var clientCore = new ClientCore( + modelId: "model", + openAIClient: new OpenAIClient( + new ApiKeyCredential("test"), + new OpenAIClientOptions() + { + Transport = new HttpClientPipelineTransport(client), + RetryPolicy = new ClientRetryPolicy(maxRetries: 0), + NetworkTimeout = Timeout.InfiniteTimeSpan + })); + + var pipelineMessage = clientCore.Client!.Pipeline.CreateMessage(); + pipelineMessage.Request.Method = "POST"; + pipelineMessage.Request.Uri = new Uri("http://localhost"); + pipelineMessage.Request.Content = BinaryContent.Create(new BinaryData("test")); + + // Assert + await clientCore.Client.Pipeline.SendAsync(pipelineMessage); + + Assert.False(handler.RequestHeaders!.Contains(HttpHeaderConstant.Names.SemanticKernelVersion)); + Assert.DoesNotContain(HttpHeaderConstant.Values.UserAgent, handler.RequestHeaders.GetValues("User-Agent").FirstOrDefault()); + } + + [Theory] + [InlineData(null)] + [InlineData("")] + [InlineData("value")] + public void ItAddsAttributesButDoesNothingIfNullOrEmpty(string? 
value) + { + // Arrange + var clientCore = new ClientCore("model", "apikey"); + // Act + + clientCore.AddAttribute("key", value); + + // Assert + if (string.IsNullOrEmpty(value)) + { + Assert.False(clientCore.Attributes.ContainsKey("key")); + } + else + { + Assert.True(clientCore.Attributes.ContainsKey("key")); + Assert.Equal(value, clientCore.Attributes["key"]); + } + } + + [Fact] + public void ItAddsModelIdAttributeAsExpected() + { + // Arrange + var expectedModelId = "modelId"; + + // Act + var clientCore = new ClientCore(expectedModelId, "apikey"); + var clientCoreBreakingGlass = new ClientCore(expectedModelId, new OpenAIClient(new ApiKeyCredential(" "))); + + // Assert + Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.True(clientCoreBreakingGlass.Attributes.ContainsKey(AIServiceExtensions.ModelIdKey)); + Assert.Equal(expectedModelId, clientCore.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal(expectedModelId, clientCoreBreakingGlass.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItAddOrNotOrganizationIdAttributeWhenProvided() + { + // Arrange + var expectedOrganizationId = "organizationId"; + + // Act + var clientCore = new ClientCore("modelId", "apikey", expectedOrganizationId); + var clientCoreWithoutOrgId = new ClientCore("modelId", "apikey"); + + // Assert + Assert.True(clientCore.Attributes.ContainsKey(ClientCore.OrganizationKey)); + Assert.Equal(expectedOrganizationId, clientCore.Attributes[ClientCore.OrganizationKey]); + Assert.False(clientCoreWithoutOrgId.Attributes.ContainsKey(ClientCore.OrganizationKey)); + } + + [Fact] + public void ItThrowsWhenNotUsingCustomEndpointAndApiKeyIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new ClientCore("modelId", " ")); + Assert.Throws(() => new ClientCore("modelId", "")); + Assert.Throws(() => new ClientCore("modelId", apiKey: null!)); + } + + [Fact] + public void ItDoesNotThrowWhenUsingCustomEndpointAndApiKeyIsNotProvided() 
+ { + // Act & Assert + ClientCore? clientCore = null; + clientCore = new ClientCore("modelId", " ", endpoint: new Uri("http://localhost")); + clientCore = new ClientCore("modelId", "", endpoint: new Uri("http://localhost")); + clientCore = new ClientCore("modelId", apiKey: null!, endpoint: new Uri("http://localhost")); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs similarity index 72% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs index cf2d32d3b52e..67814afab387 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIChatMessageContentTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIChatMessageContentTests.cs @@ -1,14 +1,15 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections; using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI; +using System.Text.Json; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. 
@@ -19,10 +20,10 @@ public sealed class OpenAIChatMessageContentTests public void ConstructorsWorkCorrectly() { // Arrange - List toolCalls = [new FakeChatCompletionsToolCall("id")]; + List toolCalls = [ChatToolCall.CreateFunctionToolCall("id", "name", BinaryData.FromString("args"))]; // Act - var content1 = new OpenAIChatMessageContent(new ChatRole("user"), "content1", "model-id1", toolCalls) { AuthorName = "Fred" }; + var content1 = new OpenAIChatMessageContent(ChatMessageRole.User, "content1", "model-id1", toolCalls) { AuthorName = "Fred" }; var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content2", "model-id2", toolCalls); // Assert @@ -34,11 +35,11 @@ public void ConstructorsWorkCorrectly() public void GetOpenAIFunctionToolCallsReturnsCorrectList() { // Arrange - List toolCalls = [ - new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), - new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), - new FakeChatCompletionsToolCall("id3"), - new FakeChatCompletionsToolCall("id4")]; + var args = JsonSerializer.Serialize(new Dictionary()); + + List toolCalls = [ + ChatToolCall.CreateFunctionToolCall("id1", "name", BinaryData.FromString(args)), + ChatToolCall.CreateFunctionToolCall("id2", "name", BinaryData.FromString(args))]; var content1 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", toolCalls); var content2 = new OpenAIChatMessageContent(AuthorRole.User, "content", "model-id", []); @@ -61,15 +62,15 @@ public void GetOpenAIFunctionToolCallsReturnsCorrectList() public void MetadataIsInitializedCorrectly(bool readOnlyMetadata) { // Arrange + var args = JsonSerializer.Serialize(new Dictionary()); + IReadOnlyDictionary metadata = readOnlyMetadata ? 
new CustomReadOnlyDictionary(new Dictionary { { "key", "value" } }) : new Dictionary { { "key", "value" } }; - List toolCalls = [ - new ChatCompletionsFunctionToolCall("id1", "name", string.Empty), - new ChatCompletionsFunctionToolCall("id2", "name", string.Empty), - new FakeChatCompletionsToolCall("id3"), - new FakeChatCompletionsToolCall("id4")]; + List toolCalls = [ + ChatToolCall.CreateFunctionToolCall("id1", "name", BinaryData.FromString(args)), + ChatToolCall.CreateFunctionToolCall("id2", "name", BinaryData.FromString(args))]; // Act var content1 = new OpenAIChatMessageContent(AuthorRole.User, "content1", "model-id1", [], metadata); @@ -83,9 +84,9 @@ public void MetadataIsInitializedCorrectly(bool readOnlyMetadata) Assert.Equal(2, content2.Metadata.Count); Assert.Equal("value", content2.Metadata["key"]); - Assert.IsType>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]); + Assert.IsType>(content2.Metadata["ChatResponseMessage.FunctionToolCalls"]); - var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List; + var actualToolCalls = content2.Metadata["ChatResponseMessage.FunctionToolCalls"] as List; Assert.NotNull(actualToolCalls); Assert.Equal(2, actualToolCalls.Count); @@ -97,7 +98,7 @@ private void AssertChatMessageContent( AuthorRole expectedRole, string expectedContent, string expectedModelId, - IReadOnlyList expectedToolCalls, + IReadOnlyList expectedToolCalls, OpenAIChatMessageContent actualContent, string? 
expectedName = null) { @@ -108,9 +109,6 @@ private void AssertChatMessageContent( Assert.Same(expectedToolCalls, actualContent.ToolCalls); } - private sealed class FakeChatCompletionsToolCall(string id) : ChatCompletionsToolCall(id) - { } - private sealed class CustomReadOnlyDictionary(IDictionary dictionary) : IReadOnlyDictionary // explicitly not implementing IDictionary<> { public TValue this[TKey key] => dictionary[key]; @@ -119,7 +117,7 @@ private sealed class CustomReadOnlyDictionary(IDictionary dictionary.Count; public bool ContainsKey(TKey key) => dictionary.ContainsKey(key); public IEnumerator> GetEnumerator() => dictionary.GetEnumerator(); - public bool TryGetValue(TKey key, [MaybeNullWhen(false)] out TValue value) => dictionary.TryGetValue(key, out value); + public bool TryGetValue(TKey key, out TValue value) => dictionary.TryGetValue(key, out value!); IEnumerator IEnumerable.GetEnumerator() => dictionary.GetEnumerator(); } } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs similarity index 83% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs index a9f94d81a673..1967ee882ec8 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/OpenAIFunctionTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionTests.cs @@ -4,12 +4,12 @@ using System.ComponentModel; using System.Linq; using System.Text.Json; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; public sealed class OpenAIFunctionTests { @@ -52,11 +52,11 @@ public void 
ItCanConvertToFunctionDefinitionWithNoPluginName() OpenAIFunction sut = KernelFunctionFactory.CreateFromMethod(() => { }, "myfunc", "This is a description of the function.").Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = sut.ToFunctionDefinition(); + ChatTool result = sut.ToFunctionDefinition(); // Assert - Assert.Equal(sut.FunctionName, result.Name); - Assert.Equal(sut.Description, result.Description); + Assert.Equal(sut.FunctionName, result.FunctionName); + Assert.Equal(sut.Description, result.FunctionDescription); } [Fact] @@ -69,7 +69,7 @@ public void ItCanConvertToFunctionDefinitionWithNullParameters() var result = sut.ToFunctionDefinition(); // Assert - Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.Parameters.ToString()); + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{}}", result.FunctionParameters.ToString()); } [Fact] @@ -82,11 +82,11 @@ public void ItCanConvertToFunctionDefinitionWithPluginName() }).GetFunctionsMetadata()[0].ToOpenAIFunction(); // Act - FunctionDefinition result = sut.ToFunctionDefinition(); + ChatTool result = sut.ToFunctionDefinition(); // Assert - Assert.Equal("myplugin-myfunc", result.Name); - Assert.Equal(sut.Description, result.Description); + Assert.Equal("myplugin-myfunc", result.FunctionName); + Assert.Equal(sut.Description, result.FunctionDescription); } [Fact] @@ -104,15 +104,15 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndReturnParamete OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - FunctionDefinition functionDefinition = sut.ToFunctionDefinition(); + ChatTool functionDefinition = sut.ToFunctionDefinition(); var exp = JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)); - var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters)); + var act = JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters)); 
Assert.NotNull(functionDefinition); - Assert.Equal("Tests-TestFunction", functionDefinition.Name); - Assert.Equal("My test function", functionDefinition.Description); - Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters))); + Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName); + Assert.Equal("My test function", functionDefinition.FunctionDescription); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters))); } [Fact] @@ -130,12 +130,12 @@ public void ItCanConvertToFunctionDefinitionsWithParameterTypesAndNoReturnParame OpenAIFunction sut = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - FunctionDefinition functionDefinition = sut.ToFunctionDefinition(); + ChatTool functionDefinition = sut.ToFunctionDefinition(); Assert.NotNull(functionDefinition); - Assert.Equal("Tests-TestFunction", functionDefinition.Name); - Assert.Equal("My test function", functionDefinition.Description); - Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.Parameters))); + Assert.Equal("Tests-TestFunction", functionDefinition.FunctionName); + Assert.Equal("My test function", functionDefinition.FunctionDescription); + Assert.Equal(JsonSerializer.Serialize(KernelJsonSchema.Parse(expectedParameterSchema)), JsonSerializer.Serialize(KernelJsonSchema.Parse(functionDefinition.FunctionParameters))); } [Fact] @@ -147,8 +147,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypes() parameters: [new KernelParameterMetadata("param1")]).Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = f.ToFunctionDefinition(); - ParametersData pd = JsonSerializer.Deserialize(result.Parameters.ToString())!; + ChatTool result = 
f.ToFunctionDefinition(); + ParametersData pd = JsonSerializer.Deserialize(result.FunctionParameters.ToString())!; // Assert Assert.NotNull(pd.properties); @@ -167,8 +167,8 @@ public void ItCanConvertToFunctionDefinitionsWithNoParameterTypesButWithDescript parameters: [new KernelParameterMetadata("param1") { Description = "something neat" }]).Metadata.ToOpenAIFunction(); // Act - FunctionDefinition result = f.ToFunctionDefinition(); - ParametersData pd = JsonSerializer.Deserialize(result.Parameters.ToString())!; + ChatTool result = f.ToFunctionDefinition(); + ParametersData pd = JsonSerializer.Deserialize(result.FunctionParameters.ToString())!; // Assert Assert.NotNull(pd.properties); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs similarity index 78% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs index 3b4d8b4ca0d4..790c2547177c 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIFunctionToolCallTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIFunctionToolCallTests.cs @@ -1,12 +1,14 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Text; -using Azure.AI.OpenAI; +using System.Text.Json; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. 
@@ -19,7 +21,8 @@ public sealed class OpenAIFunctionToolCallTests public void FullyQualifiedNameReturnsValidName(string toolCallName, string expectedName) { // Arrange - var toolCall = new ChatCompletionsFunctionToolCall("id", toolCallName, string.Empty); + var args = JsonSerializer.Serialize(new Dictionary()); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", toolCallName, BinaryData.FromString(args)); var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); // Act & Assert @@ -31,7 +34,7 @@ public void FullyQualifiedNameReturnsValidName(string toolCallName, string expec public void ToStringReturnsCorrectValue() { // Arrange - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}"); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", BinaryData.FromString("{\n \"location\": \"San Diego\",\n \"max_price\": 300\n}")); var openAIFunctionToolCall = new OpenAIFunctionToolCall(toolCall); // Act & Assert @@ -47,7 +50,7 @@ public void ConvertToolCallUpdatesWithEmptyIndexesReturnsEmptyToolCalls() var functionArgumentBuildersByIndex = new Dictionary(); // Act - var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); @@ -65,7 +68,7 @@ public void ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls() var functionArgumentBuildersByIndex = new Dictionary { { 3, new("test-argument") } }; // Act - var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + var toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); @@ -76,7 +79,7 @@ public void 
ConvertToolCallUpdatesWithNotEmptyIndexesReturnsNotEmptyToolCalls() var toolCall = toolCalls[0]; Assert.Equal("test-id", toolCall.Id); - Assert.Equal("test-function", toolCall.Name); - Assert.Equal("test-argument", toolCall.Arguments); + Assert.Equal("test-function", toolCall.FunctionName); + Assert.Equal("test-argument", toolCall.FunctionArguments.ToString()); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIJsonSchemaTransformerTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIJsonSchemaTransformerTests.cs new file mode 100644 index 000000000000..b6df5ae06354 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIJsonSchemaTransformerTests.cs @@ -0,0 +1,166 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.ComponentModel; +using System.Text.Json; +using JsonSchemaMapper; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; + +/// +/// Unit tests for class. 
+/// +public sealed class OpenAIJsonSchemaTransformerTests +{ + private static readonly JsonSchemaMapperConfiguration s_jsonSchemaMapperConfiguration = new() + { + IncludeSchemaVersion = false, + IncludeTypeInEnums = true, + TreatNullObliviousAsNonNullable = true, + TransformSchemaNode = OpenAIJsonSchemaTransformer.Transform, + }; + + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + WriteIndented = false + }; + + [Fact] + public void ItTransformsJsonSchemaCorrectly() + { + // Arrange + var type = typeof(Parent); + var expectedSchema = """ + { + "type": "object", + "properties": { + "Items": { + "type": "array", + "items": { + "type": "object", + "properties": { + "NumericProperty": { + "description": "Description of numeric property.", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "NumericProperty" + ] + } + }, + "Item": { + "type": "object", + "properties": { + "NumericProperty": { + "description": "Description of numeric property.", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "NumericProperty" + ] + }, + "NullableItems": { + "type": [ + "array", + "null" + ], + "items": { + "type": "object", + "properties": { + "TextProperty": { + "type": [ + "string", + "null" + ] + } + }, + "additionalProperties": false, + "required": [ + "TextProperty" + ] + } + }, + "NullableItem": { + "type": [ + "object", + "null" + ], + "properties": { + "TextProperty": { + "type": [ + "string", + "null" + ] + } + }, + "additionalProperties": false, + "required": [ + "TextProperty" + ] + }, + "TextProperty": { + "type": [ + "string", + "null" + ] + } + }, + "additionalProperties": false, + "required": [ + "Items", + "Item", + "NullableItems", + "NullableItem", + "TextProperty" + ] + } + """; + + // Act + var schema = KernelJsonSchemaBuilder.Build(options: null, type, configuration: s_jsonSchemaMapperConfiguration); + + // Assert + Assert.Equal(NormalizeJson(expectedSchema), 
NormalizeJson(schema.ToString())); + } + + #region private + + private static string NormalizeJson(string json) + { + using JsonDocument doc = JsonDocument.Parse(json); + return JsonSerializer.Serialize(doc, s_jsonSerializerOptions); + } + + private sealed class Parent + { + public List Items { get; set; } = []; + + public Child Item { get; set; } = new(); + + public List? NullableItems { get; set; } + + public ChildNullable? NullableItem { get; set; } + + public string? TextProperty { get; set; } + } + + private sealed class Child + { + [Description("Description of numeric property.")] + public int NumericProperty { get; set; } + } + + private struct ChildNullable + { + public string? TextProperty { get; set; } + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs new file mode 100644 index 000000000000..15782d16f427 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/OpenAIWithDataStreamingChatMessageContentTests.cs @@ -0,0 +1,139 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; + +#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions + +/// +/// Unit tests for class. 
+/// +public sealed class OpenAIStreamingChatMessageContentTests +{ + [Fact] + public async Task ConstructorWithStreamingUpdateAsync() + { + // Arrange + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); + + using var messageHandlerStub = new HttpMessageHandlerStub(); + messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + using var httpClient = new HttpClient(messageHandlerStub); + var openAIClient = new OpenAIClient(new ApiKeyCredential("key"), new() { Transport = new HttpClientPipelineTransport(httpClient) }); + + // Act & Assert + var enumerator = openAIClient.GetChatClient("modelId").CompleteChatStreamingAsync("Test message").GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + var update = enumerator.Current; + + // Act + var content = new OpenAIStreamingChatMessageContent(update!, 0, "model-id"); + + // Assert + Assert.Equal("Test chat streaming response", content.Content); + } + + [Fact] + public void ConstructorWithParameters() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + + // Assert + Assert.Equal("test message", content.Content); + Assert.Equal(AuthorRole.User, content.Role); + Assert.Equal(0, content.ChoiceIndex); + Assert.Equal("testModel", content.ModelId); + Assert.Empty(content.ToolCallUpdates!); + Assert.Equal("test-value", content.Metadata!["test-index"]); + Assert.Equal(Encoding.UTF8, content.Encoding); + } + + [Fact] + public void ToStringReturnsAsExpected() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + 
+ // Assert + Assert.Equal("test message", content.ToString()); + } + + [Fact] + public void ToByteArrayReturnsAsExpected() + { + // Act + var content = new OpenAIStreamingChatMessageContent( + authorRole: AuthorRole.User, + content: "test message", + choiceIndex: 0, + modelId: "testModel", + toolCallUpdates: [], + metadata: new Dictionary() { ["test-index"] = "test-value" }); + + // Assert + Assert.Equal("test message", Encoding.UTF8.GetString(content.ToByteArray())); + } + + /* + [Theory] + [MemberData(nameof(InvalidChoices))] + public void ConstructorWithInvalidChoiceSetsNullContent(object choice) + { + // Arrange + var streamingChoice = choice as ChatWithDataStreamingChoice; + + // Act + var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); + + // Assert + Assert.Null(content.Content); + } + + public static IEnumerable ValidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 1" } }] }, "Content 1" }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 2", Role = "Assistant" } }] }, "Content 2" }; + } + } + + public static IEnumerable InvalidChoices + { + get + { + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { EndTurn = true }] } }; + yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content", Role = "tool" } }] } }; + } + }*/ +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs similarity index 96% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs index 722ee4d0817c..1010adbab869 100644 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatHistoryExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ChatHistoryExtensionsTests.cs @@ -8,7 +8,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; public class ChatHistoryExtensionsTests { [Fact] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..7fba32d99e35 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelBuilderExtensionsTests.cs @@ -0,0 +1,164 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; + +public class KernelBuilderExtensionsTests +{ + [Fact] + public void ItCanAddTextEmbeddingGenerationService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = 
sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient(new ApiKeyCredential("key"))) + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToImageService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextToImage("key", modelId: "model") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToAudioService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAITextToAudio("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIAudioToText("model", "key") + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextServiceWithOpenAIClient() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIAudioToText("model", new OpenAIClient(new ApiKeyCredential("key"))) + .Build() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + [Obsolete("This test is deprecated and will be removed in a future version.")] + public void ItCanAddFileService() + { + // Arrange + var sut = Kernel.CreateBuilder(); + + // Act + var service = sut.AddOpenAIFiles("key").Build() + .GetRequiredService(); + } + + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.OpenAIClientInline)] + [InlineData(InitializationType.OpenAIClientInServiceProvider)] + public void 
KernelBuilderAddOpenAIChatCompletionAddsValidService(InitializationType type) + { + // Arrange + var client = new OpenAIClient(new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + builder = type switch + { + InitializationType.ApiKey => builder.AddOpenAIChatCompletion("model-id", "api-key"), + InitializationType.OpenAIClientInline => builder.AddOpenAIChatCompletion("model-id", client), + InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIChatCompletion("model-id"), + _ => builder + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is OpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is OpenAIChatCompletionService); + } + + #endregion + + public enum InitializationType + { + ApiKey, + OpenAIClientInline, + OpenAIClientInServiceProvider, + OpenAIClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs similarity index 92% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs index b45fc64b60ba..e817d559aeaa 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/KernelFunctionMetadataExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/KernelFunctionMetadataExtensionsTests.cs @@ -9,12 +9,12 @@ #pragma warning disable CA1812 // Uninstantiated internal types -namespace SemanticKernel.Connectors.UnitTests.OpenAI.FunctionCalling; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; public sealed 
class KernelFunctionMetadataExtensionsTests { [Fact] - public void ItCanConvertToOpenAIFunctionNoParameters() + public void ItCanConvertToAzureOpenAIFunctionNoParameters() { // Arrange var sut = new KernelFunctionMetadata("foo") @@ -44,7 +44,7 @@ public void ItCanConvertToOpenAIFunctionNoParameters() } [Fact] - public void ItCanConvertToOpenAIFunctionNoPluginName() + public void ItCanConvertToAzureOpenAIFunctionNoPluginName() { // Arrange var sut = new KernelFunctionMetadata("foo") @@ -76,7 +76,7 @@ public void ItCanConvertToOpenAIFunctionNoPluginName() [Theory] [InlineData(false)] [InlineData(true)] - public void ItCanConvertToOpenAIFunctionWithParameter(bool withSchema) + public void ItCanConvertToAzureOpenAIFunctionWithParameter(bool withSchema) { // Arrange var param1 = new KernelParameterMetadata("param1") @@ -118,7 +118,7 @@ public void ItCanConvertToOpenAIFunctionWithParameter(bool withSchema) } [Fact] - public void ItCanConvertToOpenAIFunctionWithParameterNoType() + public void ItCanConvertToAzureOpenAIFunctionWithParameterNoType() { // Arrange var param1 = new KernelParameterMetadata("param1") { Description = "This is param1" }; @@ -151,7 +151,7 @@ public void ItCanConvertToOpenAIFunctionWithParameterNoType() } [Fact] - public void ItCanConvertToOpenAIFunctionWithNoReturnParameterType() + public void ItCanConvertToAzureOpenAIFunctionWithNoReturnParameterType() { // Arrange var param1 = new KernelParameterMetadata("param1") @@ -180,7 +180,7 @@ public void ItCanConvertToOpenAIFunctionWithNoReturnParameterType() } [Fact] - public void ItCanCreateValidOpenAIFunctionManualForPlugin() + public void ItCanCreateValidAzureOpenAIFunctionManualForPlugin() { // Arrange var kernel = new Kernel(); @@ -197,12 +197,12 @@ public void ItCanCreateValidOpenAIFunctionManualForPlugin() Assert.NotNull(result); Assert.Equal( """{"type":"object","required":["parameter1","parameter2","parameter3"],"properties":{"parameter1":{"type":"string","description":"String 
parameter"},"parameter2":{"type":"string","enum":["Value1","Value2"],"description":"Enum parameter"},"parameter3":{"type":"string","format":"date-time","description":"DateTime parameter"}}}""", - result.Parameters.ToString() + result.FunctionParameters.ToString() ); } [Fact] - public void ItCanCreateValidOpenAIFunctionManualForPrompt() + public void ItCanCreateValidAzureOpenAIFunctionManualForPrompt() { // Arrange var promptTemplateConfig = new PromptTemplateConfig("Hello AI") @@ -232,7 +232,7 @@ public void ItCanCreateValidOpenAIFunctionManualForPrompt() Assert.NotNull(result); Assert.Equal( """{"type":"object","required":["parameter1","parameter2"],"properties":{"parameter1":{"type":"string","description":"String parameter"},"parameter2":{"enum":["Value1","Value2"],"description":"Enum parameter"}}}""", - result.Parameters.ToString() + result.FunctionParameters.ToString() ); } diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs index c3ee67df7515..6c30829492d5 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIPluginCollectionExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIPluginCollectionExtensionsTests.cs @@ -1,11 +1,14 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-using Azure.AI.OpenAI; +using System; +using System.Collections.Generic; +using System.Text.Json; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core; /// /// Unit tests for class. @@ -16,10 +19,11 @@ public sealed class OpenAIPluginCollectionExtensionsTests public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse() { // Arrange + var args = JsonSerializer.Serialize(new Dictionary()); var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin"); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin_MyFunction", string.Empty); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin_MyFunction", BinaryData.FromString(args)); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); @@ -34,11 +38,12 @@ public void TryGetFunctionAndArgumentsWithNonExistingFunctionReturnsFalse() public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue() { // Arrange + var args = JsonSerializer.Serialize(new Dictionary()); var function = KernelFunctionFactory.CreateFromMethod(() => "Result", "MyFunction"); var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", string.Empty); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", BinaryData.FromString(args)); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); @@ -46,7 +51,7 @@ public void TryGetFunctionAndArgumentsWithoutArgumentsReturnsTrue() // Assert Assert.True(result); Assert.Equal(function.Name, actualFunction?.Name); - 
Assert.Null(actualArguments); + Assert.Empty(actualArguments!); } [Fact] @@ -57,7 +62,7 @@ public void TryGetFunctionAndArgumentsWithArgumentsReturnsTrue() var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); var plugins = new KernelPluginCollection([plugin]); - var toolCall = new ChatCompletionsFunctionToolCall("id", "MyPlugin-MyFunction", "{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}"); + var toolCall = ChatToolCall.CreateFunctionToolCall("id", "MyPlugin-MyFunction", BinaryData.FromString("{\n \"location\": \"San Diego\",\n \"max_price\": 300\n,\n \"null_argument\": null\n}")); // Act var result = plugins.TryGetFunctionAndArguments(toolCall, out var actualFunction, out var actualArguments); diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..47a785d404f1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/ServiceCollectionExtensionsTests.cs @@ -0,0 +1,165 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions; + +public class ServiceCollectionExtensionsTests +{ + #region Chat completion + + [Theory] + [InlineData(InitializationType.ApiKey)] + [InlineData(InitializationType.ClientInline)] + [InlineData(InitializationType.ClientInServiceProvider)] + public void ItCanAddChatCompletionService(InitializationType type) + { + // Arrange + var client = new OpenAIClient(new ApiKeyCredential("key")); + var builder = Kernel.CreateBuilder(); + + builder.Services.AddSingleton(client); + + // Act + IServiceCollection collection = type switch + { + InitializationType.ApiKey => builder.Services.AddOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), + InitializationType.ClientInline => builder.Services.AddOpenAIChatCompletion("deployment-name", client), + InitializationType.ClientInServiceProvider => builder.Services.AddOpenAIChatCompletion("deployment-name"), + _ => builder.Services + }; + + // Assert + var chatCompletionService = builder.Build().GetRequiredService(); + Assert.True(chatCompletionService is OpenAIChatCompletionService); + + var textGenerationService = builder.Build().GetRequiredService(); + Assert.True(textGenerationService is OpenAIChatCompletionService); + } + + #endregion + + [Fact] + public void ItCanAddTextEmbeddingGenerationService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", "key") 
+ .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextEmbeddingGenerationServiceWithOpenAIClient() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextEmbeddingGeneration("model", new OpenAIClient(new ApiKeyCredential("key"))) + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddImageToTextService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextToImage("key", modelId: "model") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddTextToAudioService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAITextToAudio("model", "key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIAudioToText("model", "key") + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItCanAddAudioToTextServiceWithOpenAIClient() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIAudioToText("model", new OpenAIClient(new ApiKeyCredential("key"))) + .BuildServiceProvider() + .GetRequiredService(); + + // Assert + Assert.Equal("model", service.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + [Obsolete("This test is deprecated and will be removed in a future version.")] + 
public void ItCanAddFileService() + { + // Arrange + var sut = new ServiceCollection(); + + // Act + var service = sut.AddOpenAIFiles("key") + .BuildServiceProvider() + .GetRequiredService(); + } + + public enum InitializationType + { + ApiKey, + ClientInline, + ClientInServiceProvider, + ClientEndpoint, + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs similarity index 59% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs index 40959c7c67ed..65f8f1bae85a 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIAudioToTextServiceTests.cs @@ -1,16 +1,17 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.ClientModel; using System.Net.Http; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; +using OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; /// /// Unit tests for class. 
@@ -43,13 +44,25 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) Assert.Equal("model-id", service.Attributes["ModelId"]); } + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAIAudioToTextService(" ", "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService(" ", openAIClient: new(new ApiKeyCredential("apikey")))); + Assert.Throws(() => new OpenAIAudioToTextService("", "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService("", openAIClient: new(new ApiKeyCredential("apikey")))); + Assert.Throws(() => new OpenAIAudioToTextService(null!, "apikey")); + Assert.Throws(() => new OpenAIAudioToTextService(null!, openAIClient: new(new ApiKeyCredential("apikey")))); + } + [Theory] [InlineData(true)] [InlineData(false)] public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) { // Arrange & Act - var client = new OpenAIClient("key"); + var client = new OpenAIClient(new ApiKeyCredential("key")); var service = includeLoggerFactory ? 
new OpenAIAudioToTextService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : new OpenAIAudioToTextService("model-id", client); @@ -77,6 +90,26 @@ public async Task GetTextContentByDefaultWorksCorrectlyAsync() Assert.Equal("Test audio-to-text response", result[0].Text); } + [Fact] + public async Task GetTextContentThrowsIfAudioCantBeReadAsync() + { + // Arrange + var service = new OpenAIAudioToTextService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => { await service.GetTextContentsAsync(new AudioContent(new Uri("http://remote-audio")), new OpenAIAudioToTextExecutionSettings("file.mp3")); }); + } + + [Fact] + public async Task GetTextContentThrowsIfFileNameIsInvalidAsync() + { + // Arrange + var service = new OpenAIAudioToTextService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => { await service.GetTextContentsAsync(new AudioContent(new BinaryData("data"), mimeType: null), new OpenAIAudioToTextExecutionSettings("invalid")); }); + } + public void Dispose() { this._httpClient.Dispose(); diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs new file mode 100644 index 000000000000..f07731755f94 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs @@ -0,0 +1,1446 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Diagnostics; +using Microsoft.SemanticKernel.TextGeneration; +using Moq; +using OpenAI; +using OpenAI.Chat; +using Xunit; + +using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; + +/// +/// Unit tests for +/// +public sealed class OpenAIChatCompletionServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly MultipleHttpMessageHandlerStub _multiMessageHandlerStub; + private readonly HttpClient _httpClient; + private readonly OpenAIFunction _timepluginDate, _timepluginNow; + private readonly OpenAIPromptExecutionSettings _executionSettings; + private readonly Mock _mockLoggerFactory; + private readonly ChatHistory _chatHistoryForTest = [new ChatMessageContent(AuthorRole.User, "test")]; + + public OpenAIChatCompletionServiceTests() + { + this._messageHandlerStub = new HttpMessageHandlerStub(); + this._multiMessageHandlerStub = new MultipleHttpMessageHandlerStub(); + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + + IList functions = KernelPluginFactory.CreateFromFunctions("TimePlugin", new[] + { + KernelFunctionFactory.CreateFromMethod((string? format = null) => DateTime.Now.Date.ToString(format, CultureInfo.InvariantCulture), "Date", "TimePlugin.Date"), + KernelFunctionFactory.CreateFromMethod((string? 
format = null) => DateTime.Now.ToString(format, CultureInfo.InvariantCulture), "Now", "TimePlugin.Now"), + }).GetFunctionsMetadata(); + + this._timepluginDate = functions[0].ToOpenAIFunction(); + this._timepluginNow = functions[1].ToOpenAIFunction(); + + this._executionSettings = new() + { + ToolCallBehavior = ToolCallBehavior.EnableFunctions([this._timepluginDate, this._timepluginNow]) + }; + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var service = includeLoggerFactory ? + new OpenAIChatCompletionService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : + new OpenAIChatCompletionService("model-id", "api-key", "organization"); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Theory] + [InlineData("http://localhost:1234", "http://localhost:1234/chat/completions")] + [InlineData("http://localhost:8080", "http://localhost:8080/chat/completions")] + [InlineData("https://something:8080", "https://something:8080/chat/completions")] // Accepts TLS Secured endpoints + [InlineData("http://localhost:1234/v2", "http://localhost:1234/v2/chat/completions")] + [InlineData("http://localhost:8080/v2", "http://localhost:8080/v2/chat/completions")] + public async Task ItUsesCustomEndpointsWhenProvidedDirectlyAsync(string endpointProvided, string expectedEndpoint) + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpointProvided)); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.Equal(expectedEndpoint, 
this._messageHandlerStub.RequestUri!.ToString()); + } + + [Theory] + [InlineData("http://localhost:1234", "http://localhost:1234/chat/completions")] + [InlineData("http://localhost:8080", "http://localhost:8080/chat/completions")] + [InlineData("https://something:8080", "https://something:8080/chat/completions")] // Accepts TLS Secured endpoints + [InlineData("http://localhost:1234/v2", "http://localhost:1234/v2/chat/completions")] + [InlineData("http://localhost:8080/v2", "http://localhost:8080/v2/chat/completions")] + public async Task ItUsesCustomEndpointsWhenProvidedAsBaseAddressAsync(string endpointProvided, string expectedEndpoint) + { + // Arrange + this._httpClient.BaseAddress = new Uri(endpointProvided); + var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpointProvided)); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.Equal(expectedEndpoint, this._messageHandlerStub.RequestUri!.ToString()); + } + + [Fact] + public async Task ItUsesHttpClientEndpointIfProvidedEndpointIsMissingAsync() + { + // Arrange + this._httpClient.BaseAddress = new Uri("http://localhost:12312"); + var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: null!); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.Equal("http://localhost:12312/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); + } + + [Fact] + public async Task 
ItUsesDefaultEndpointIfProvidedEndpointIsMissingAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: "abc", httpClient: this._httpClient, endpoint: null!); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + Assert.Equal("https://api.openai.com/v1/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) + { + // Arrange & Act + var client = new OpenAIClient(new ApiKeyCredential("key")); + var service = includeLoggerFactory ? + new OpenAIChatCompletionService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : + new OpenAIChatCompletionService("model-id", client); + + // Assert + Assert.NotNull(service); + Assert.Equal("model-id", service.Attributes["ModelId"]); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + + // Act + await chatCompletion.GetChatMessageContentsAsync([new ChatMessageContent(AuthorRole.User, "test")], this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", 
optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNowAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + this._executionSettings.ToolCallBehavior = ToolCallBehavior.RequireFunction(this._timepluginNow); + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(1, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + } + + [Fact] + public async Task ItCreatesNoFunctionsWhenUsingNoneAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + this._executionSettings.ToolCallBehavior = null; + + // Act + await chatCompletion.GetChatMessageContentsAsync(this._chatHistoryForTest, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = 
JsonSerializer.Deserialize(actualRequestContent); + Assert.False(optionsJson.TryGetProperty("functions", out var _)); + } + + [Fact] + public async Task ItAddsIdToChatMessageAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.Tool, "Hello", metadata: new Dictionary() { { OpenAIChatMessageContent.ToolIdProperty, "John Doe" } }); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(1, optionsJson.GetProperty("messages").GetArrayLength()); + Assert.Equal("John Doe", optionsJson.GetProperty("messages")[0].GetProperty("tool_call_id").GetString()); + } + + [Fact] + public async Task ItGetChatMessageContentsShouldHaveModelIdDefinedAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse, Encoding.UTF8, "application/json") }; + + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, this._executionSettings); + + // Assert + Assert.NotNull(chatMessage.ModelId); + Assert.Equal("gpt-3.5-turbo", chatMessage.ModelId); + } + + [Fact] + public async Task 
ItGetTextContentsShouldHaveModelIdDefinedAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse, Encoding.UTF8, "application/json") }; + + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + var textContent = await chatCompletion.GetTextContentAsync("hello", this._executionSettings); + + // Assert + Assert.NotNull(textContent.ModelId); + Assert.Equal("gpt-3.5-turbo", textContent.ModelId); + } + + [Fact] + public async Task GetStreamingTextContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + var enumerator = service.GetStreamingTextContentsAsync("Prompt").GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Text); + + await enumerator.MoveNextAsync(); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() + { + // Arrange + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); + using var stream = File.OpenRead("TestData/chat_completion_streaming_test_response.txt"); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act & Assert + var enumerator = 
service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Content); + + await enumerator.MoveNextAsync(); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + Assert.NotNull(enumerator.Current.Metadata?["Usage"]); + var serializedUsage = JsonSerializer.Serialize(enumerator.Current.Metadata?["Usage"])!; + Assert.Contains("\"OutputTokenCount\":8", serializedUsage); + Assert.Contains("\"InputTokenCount\":13", serializedUsage); + Assert.Contains("\"TotalTokenCount\":21", serializedUsage); + } + + [Fact] + public async Task ItAddsSystemMessageAsync() + { + // Arrange + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent(ChatCompletionResponse) }; + var chatHistory = new ChatHistory(); + chatHistory.AddMessage(AuthorRole.User, "Hello"); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(1, messages.GetArrayLength()); + + Assert.Equal("Hello", messages[0].GetProperty("content").GetString()); + Assert.Equal("user", messages[0].GetProperty("role").GetString()); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function1 = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some 
weather"; + }, "GetCurrentWeather"); + + var function2 = KernelFunctionFactory.CreateFromMethod((string argument) => + { + functionCallCount++; + throw new ArgumentException("Some exception"); + }, "FunctionWithException"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2])); + + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization-id", multiHttpClient, this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_multiple_function_calls_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + + this._multiMessageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Content); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + await enumerator.MoveNextAsync(); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + // Keep looping until the end of stream + while (await enumerator.MoveNextAsync()) + { + } + + Assert.Equal(2, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvokeAttemptsAsync() + { + // Arrange + const int DefaultMaximumAutoInvokeAttempts = 128; + const int ModelResponsesCount = 129; + + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function 
= KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function])); + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: multiHttpClient, loggerFactory: this._mockLoggerFactory.Object); + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var responses = new List(); + + for (var i = 0; i < ModelResponsesCount; i++) + { + responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }); + } + + this._multiMessageHandlerStub.ResponsesToReturn = responses; + + // Act & Assert + await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([], settings, kernel)) + { + Assert.Equal("Test chat streaming response", chunk.Content); + } + + Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount); + } + + [Fact] + public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync() + { + // Arrange + int functionCallCount = 0; + + var kernel = Kernel.CreateBuilder().Build(); + var function = KernelFunctionFactory.CreateFromMethod((string location) => + { + functionCallCount++; + return "Some weather"; + }, "GetCurrentWeather"); + + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); + var openAIFunction = plugin.GetFunctionsMetadata().First().ToOpenAIFunction(); + + kernel.Plugins.Add(plugin); + using var multiHttpClient = new HttpClient(this._multiMessageHandlerStub, false); + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: multiHttpClient, loggerFactory: this._mockLoggerFactory.Object); + var settings = new 
OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) }; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + + this._multiMessageHandlerStub.ResponsesToReturn = [response1, response2]; + + // Act & Assert + var enumerator = service.GetStreamingChatMessageContentsAsync([], settings, kernel).GetAsyncEnumerator(); + + // Function Tool Call Streaming (One Chunk) + await enumerator.MoveNextAsync(); + Assert.Equal("Test chat streaming response", enumerator.Current.Content); + Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]); + + // Chat Completion Streaming (1st Chunk) + await enumerator.MoveNextAsync(); + Assert.Null(enumerator.Current.Metadata?["FinishReason"]); + + // Chat Completion Streaming (2nd Chunk) + await enumerator.MoveNextAsync(); + Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]); + + Assert.Equal(1, functionCallCount); + + var requestContents = this._multiMessageHandlerStub.RequestContents; + + Assert.Equal(2, requestContents.Count); + + requestContents.ForEach(Assert.NotNull); + + var firstContent = Encoding.UTF8.GetString(requestContents[0]!); + var secondContent = Encoding.UTF8.GetString(requestContents[1]!); + + var firstContentJson = JsonSerializer.Deserialize(firstContent); + var secondContentJson = JsonSerializer.Deserialize(secondContent); + + Assert.Equal(1, firstContentJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("MyPlugin-GetCurrentWeather", firstContentJson.GetProperty("tool_choice").GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", 
secondContentJson.GetProperty("tool_choice").GetString()); + } + + [Fact] + public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync() + { + // Arrange + const string Prompt = "This is test prompt"; + const string SystemMessage = "This is test system message"; + + var service = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddTransient((sp) => service); + Kernel kernel = builder.Build(); + + // Act + var result = await kernel.InvokePromptAsync(Prompt, new(settings)); + + // Assert + Assert.Equal("Test chat response", result.ToString()); + + var requestContentByteArray = this._messageHandlerStub.RequestContent; + + Assert.NotNull(requestContentByteArray); + + var requestContent = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContentByteArray)); + + var messages = requestContent.GetProperty("messages"); + + Assert.Equal(2, messages.GetArrayLength()); + + Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString()); + Assert.Equal("system", messages[0].GetProperty("role").GetString()); + + Assert.Equal(Prompt, messages[1].GetProperty("content").GetString()); + Assert.Equal("user", messages[1].GetProperty("role").GetString()); + } + + [Fact] + public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync() + { + // Arrange + const string Prompt = "This is test prompt"; + const string SystemMessage = "This is test system message"; + const string AssistantMessage = "This is assistant message"; + const string CollectionItemPrompt = "This is collection item prompt"; + + var chatCompletion = new 
OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; + + using var response = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(ChatCompletionResponse) }; + this._messageHandlerStub.ResponseToReturn = response; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage(Prompt); + chatHistory.AddAssistantMessage(AssistantMessage); + chatHistory.AddUserMessage( + [ + new TextContent(CollectionItemPrompt), + new ImageContent(new Uri("https://image")) + ]); + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, settings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + + Assert.Equal(4, messages.GetArrayLength()); + + Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString()); + Assert.Equal("system", messages[0].GetProperty("role").GetString()); + + Assert.Equal(Prompt, messages[1].GetProperty("content").GetString()); + Assert.Equal("user", messages[1].GetProperty("role").GetString()); + + Assert.Equal(AssistantMessage, messages[2].GetProperty("content").GetString()); + Assert.Equal("assistant", messages[2].GetProperty("role").GetString()); + + var contentItems = messages[3].GetProperty("content"); + Assert.Equal(2, contentItems.GetArrayLength()); + Assert.Equal(CollectionItemPrompt, contentItems[0].GetProperty("text").GetString()); + Assert.Equal("text", contentItems[0].GetProperty("type").GetString()); + Assert.Equal("https://image/", contentItems[1].GetProperty("image_url").GetProperty("url").GetString()); + Assert.Equal("image_url", contentItems[1].GetProperty("type").GetString()); + } + + [Fact] + public async Task 
FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_multiple_function_calls_test_response.json")) + }; + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings); + + // Assert + Assert.NotNull(result); + Assert.Equal(5, result.Items.Count); + + var getCurrentWeatherFunctionCall = result.Items[0] as FunctionCallContent; + Assert.NotNull(getCurrentWeatherFunctionCall); + Assert.Equal("GetCurrentWeather", getCurrentWeatherFunctionCall.FunctionName); + Assert.Equal("MyPlugin", getCurrentWeatherFunctionCall.PluginName); + Assert.Equal("1", getCurrentWeatherFunctionCall.Id); + Assert.Equal("Boston, MA", getCurrentWeatherFunctionCall.Arguments?["location"]?.ToString()); + + var functionWithExceptionFunctionCall = result.Items[1] as FunctionCallContent; + Assert.NotNull(functionWithExceptionFunctionCall); + Assert.Equal("FunctionWithException", functionWithExceptionFunctionCall.FunctionName); + Assert.Equal("MyPlugin", functionWithExceptionFunctionCall.PluginName); + Assert.Equal("2", functionWithExceptionFunctionCall.Id); + Assert.Equal("value", functionWithExceptionFunctionCall.Arguments?["argument"]?.ToString()); + + var nonExistentFunctionCall = result.Items[2] as FunctionCallContent; + Assert.NotNull(nonExistentFunctionCall); + Assert.Equal("NonExistentFunction", nonExistentFunctionCall.FunctionName); + Assert.Equal("MyPlugin", nonExistentFunctionCall.PluginName); + Assert.Equal("3", 
nonExistentFunctionCall.Id); + Assert.Equal("value", nonExistentFunctionCall.Arguments?["argument"]?.ToString()); + + var invalidArgumentsFunctionCall = result.Items[3] as FunctionCallContent; + Assert.NotNull(invalidArgumentsFunctionCall); + Assert.Equal("InvalidArguments", invalidArgumentsFunctionCall.FunctionName); + Assert.Equal("MyPlugin", invalidArgumentsFunctionCall.PluginName); + Assert.Equal("4", invalidArgumentsFunctionCall.Id); + Assert.Null(invalidArgumentsFunctionCall.Arguments); + Assert.NotNull(invalidArgumentsFunctionCall.Exception); + Assert.Equal("Error: Function call arguments were invalid JSON.", invalidArgumentsFunctionCall.Exception.Message); + Assert.NotNull(invalidArgumentsFunctionCall.Exception.InnerException); + + var intArgumentsFunctionCall = result.Items[4] as FunctionCallContent; + Assert.NotNull(intArgumentsFunctionCall); + Assert.Equal("IntArguments", intArgumentsFunctionCall.FunctionName); + Assert.Equal("MyPlugin", intArgumentsFunctionCall.PluginName); + Assert.Equal("5", intArgumentsFunctionCall.Id); + Assert.Equal("36", intArgumentsFunctionCall.Arguments?["age"]?.ToString()); + } + + [Fact] + public async Task FunctionCallsShouldBeReturnedToLLMAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(ChatCompletionResponse) + }; + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var items = new ChatMessageContentItemCollection + { + new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), + new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }) + }; + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.Assistant, items) + }; + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = 
ToolCallBehavior.EnableKernelFunctions }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, settings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(1, messages.GetArrayLength()); + + var assistantMessage = messages[0]; + Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString()); + + Assert.Equal(2, assistantMessage.GetProperty("tool_calls").GetArrayLength()); + + var tool1 = assistantMessage.GetProperty("tool_calls")[0]; + Assert.Equal("1", tool1.GetProperty("id").GetString()); + Assert.Equal("function", tool1.GetProperty("type").GetString()); + + var function1 = tool1.GetProperty("function"); + Assert.Equal("MyPlugin-GetCurrentWeather", function1.GetProperty("name").GetString()); + Assert.Equal("{\"location\":\"Boston, MA\"}", function1.GetProperty("arguments").GetString()); + + var tool2 = assistantMessage.GetProperty("tool_calls")[1]; + Assert.Equal("2", tool2.GetProperty("id").GetString()); + Assert.Equal("function", tool2.GetProperty("type").GetString()); + + var function2 = tool2.GetProperty("function"); + Assert.Equal("MyPlugin-GetWeatherForecast", function2.GetProperty("name").GetString()); + Assert.Equal("{\"location\":\"Boston, MA\"}", function2.GetProperty("arguments").GetString()); + } + + [Fact] + public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(ChatCompletionResponse) + }; + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.Tool, + [ + new 
FunctionResultContent(new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"), + ]), + new ChatMessageContent(AuthorRole.Tool, + [ + new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny") + ]) + }; + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, settings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(2, messages.GetArrayLength()); + + var assistantMessage = messages[0]; + Assert.Equal("tool", assistantMessage.GetProperty("role").GetString()); + Assert.Equal("rainy", assistantMessage.GetProperty("content").GetString()); + Assert.Equal("1", assistantMessage.GetProperty("tool_call_id").GetString()); + + var assistantMessage2 = messages[1]; + Assert.Equal("tool", assistantMessage2.GetProperty("role").GetString()); + Assert.Equal("sunny", assistantMessage2.GetProperty("content").GetString()); + Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); + } + + [Fact] + public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(ChatCompletionResponse) + }; + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.Tool, + [ + new FunctionResultContent(new FunctionCallContent("GetCurrentWeather", 
"MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"), + new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny") + ]) + }; + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, settings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(2, messages.GetArrayLength()); + + var assistantMessage = messages[0]; + Assert.Equal("tool", assistantMessage.GetProperty("role").GetString()); + Assert.Equal("rainy", assistantMessage.GetProperty("content").GetString()); + Assert.Equal("1", assistantMessage.GetProperty("tool_call_id").GetString()); + + var assistantMessage2 = messages[1]; + Assert.Equal("tool", assistantMessage2.GetProperty("role").GetString()); + Assert.Equal("sunny", assistantMessage2.GetProperty("content").GetString()); + Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); + } + + [Theory] + [InlineData("string", "json_object")] + [InlineData("string", "text")] + [InlineData("string", "random")] + [InlineData("JsonElement.String", "\"json_object\"")] + [InlineData("JsonElement.String", "\"text\"")] + [InlineData("JsonElement.String", "\"random\"")] + [InlineData("ChatResponseFormat", "json_object")] + [InlineData("ChatResponseFormat", "text")] + public async Task GetChatMessageInResponseFormatsAsync(string formatType, string formatValue) + { + // Assert + object? 
format = null; + switch (formatType) + { + case "string": + format = formatValue; + break; + case "JsonElement.String": + format = JsonSerializer.Deserialize(formatValue); + break; + case "ChatResponseFormat": + format = formatValue == "text" ? ChatResponseFormat.CreateTextFormat() : ChatResponseFormat.CreateJsonObjectFormat(); + break; + } + + var modelId = "gpt-4o"; + var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient); + OpenAIPromptExecutionSettings executionSettings = new() { ResponseFormat = format }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + // Act + var result = await sut.GetChatMessageContentAsync(this._chatHistoryForTest, executionSettings); + + // Assert + Assert.NotNull(result); + } + + [Fact(Skip = "Not working running in the console")] + public async Task GetInvalidResponseThrowsExceptionAndIsCapturedByDiagnosticsAsync() + { + // Arrange + bool startedChatCompletionsActivity = false; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { Content = new StringContent("Invalid JSON") }; + + var sut = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + + // Enable ModelDiagnostics + using var listener = new ActivityListener() + { + ShouldListenTo = (activitySource) => true, //activitySource.Name == typeof(ModelDiagnostics).Namespace!, + ActivityStarted = (activity) => + { + if (activity.OperationName == "chat.completions model-id") + { + startedChatCompletionsActivity = true; + } + }, + Sample = (ref ActivityCreationOptions options) => ActivitySamplingResult.AllData, + }; + + ActivitySource.AddActivityListener(listener); + + Environment.SetEnvironmentVariable("SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS", "true"); + 
Environment.SetEnvironmentVariable("SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE", "true"); + + // Act & Assert + await Assert.ThrowsAnyAsync(async () => { await sut.GetChatMessageContentsAsync(this._chatHistoryForTest); }); + + Assert.True(ModelDiagnostics.HasListeners()); + Assert.True(ModelDiagnostics.IsSensitiveEventsEnabled()); + Assert.True(ModelDiagnostics.IsModelDiagnosticsEnabled()); + Assert.True(startedChatCompletionsActivity); + } + + [Fact] + public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. + + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(firstResponse); + + using var secondResponse = new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new 
ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await sut.GetChatMessageContentAsync(chatHistory, new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", 
functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Fact] + public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM() + { + // Arrange + static void MutateChatHistory(AutoFunctionInvocationContext context, Func next) + { + // Remove the function call messages from the chat history to reduce token count. + context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages. + + next(context); + } + + var kernel = new Kernel(); + kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]); + kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory)); + + using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(firstResponse); + + using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(secondResponse); + + var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + var chatHistory = new ChatHistory + { + new ChatMessageContent(AuthorRole.User, "What time is it?"), + new ChatMessageContent(AuthorRole.Assistant, [ + new FunctionCallContent("Date", "TimePlugin", "2") + ]), + new ChatMessageContent(AuthorRole.Tool, [ + new FunctionResultContent("Date", "TimePlugin", "2", "rainy") + ]), + new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"), + new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?") + }; + + // Act + await foreach (var 
update in sut.GetStreamingChatMessageContentsAsync(chatHistory, new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel)) + { + } + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + + var messages = optionsJson.GetProperty("messages"); + Assert.Equal(5, messages.GetArrayLength()); + + var userFirstPrompt = messages[0]; + Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString()); + Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString()); + + var assistantFirstResponse = messages[1]; + Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString()); + Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString()); + + var userSecondPrompt = messages[2]; + Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString()); + Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString()); + + var assistantSecondResponse = messages[3]; + Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString()); + Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString()); + Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString()); + + var functionResult = messages[4]; + Assert.Equal("tool", functionResult.GetProperty("role").GetString()); + Assert.Equal("rainy", functionResult.GetProperty("content").GetString()); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public async Task GetChatMessageContentsSendsValidJsonSchemaForStructuredOutputs(bool typedResponseFormat) + { + // Arrange + object 
responseFormat = typedResponseFormat ? typeof(MathReasoning) : ChatResponseFormat.CreateJsonSchemaFormat( + jsonSchemaFormatName: "MathReasoning", + jsonSchema: BinaryData.FromString(""" + { + "type": "object", + "properties": { + "Steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Explanation": { "type": "string" }, + "Output": { "type": "string" } + }, + "required": ["Explanation", "Output"], + "additionalProperties": false + } + }, + "FinalAnswer": { "type": "string" } + }, + "required": ["Steps", "FinalAnswer"], + "additionalProperties": false + } + """), + jsonSchemaIsStrict: true); + + var executionSettings = new OpenAIPromptExecutionSettings { ResponseFormat = responseFormat }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + var sut = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + + // Act + await sut.GetChatMessageContentsAsync(this._chatHistoryForTest, executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var requestJsonElement = JsonSerializer.Deserialize(actualRequestContent); + var requestResponseFormat = requestJsonElement.GetProperty("response_format"); + + Assert.Equal("json_schema", requestResponseFormat.GetProperty("type").GetString()); + Assert.Equal("MathReasoning", requestResponseFormat.GetProperty("json_schema").GetProperty("name").GetString()); + Assert.True(requestResponseFormat.GetProperty("json_schema").GetProperty("strict").GetBoolean()); + + var schema = requestResponseFormat.GetProperty("json_schema").GetProperty("schema"); + + Assert.Equal("object", schema.GetProperty("type").GetString()); + Assert.False(schema.GetProperty("additionalProperties").GetBoolean()); + Assert.Equal(2, 
schema.GetProperty("required").GetArrayLength()); + + var requiredParentProperties = new List + { + schema.GetProperty("required")[0].GetString(), + schema.GetProperty("required")[1].GetString(), + }; + + Assert.Contains("Steps", requiredParentProperties); + Assert.Contains("FinalAnswer", requiredParentProperties); + + var schemaProperties = schema.GetProperty("properties"); + + Assert.Equal("string", schemaProperties.GetProperty("FinalAnswer").GetProperty("type").GetString()); + Assert.Equal("array", schemaProperties.GetProperty("Steps").GetProperty("type").GetString()); + + var items = schemaProperties.GetProperty("Steps").GetProperty("items"); + + Assert.Equal("object", items.GetProperty("type").GetString()); + Assert.False(items.GetProperty("additionalProperties").GetBoolean()); + Assert.Equal(2, items.GetProperty("required").GetArrayLength()); + + var requiredChildProperties = new List + { + items.GetProperty("required")[0].GetString(), + items.GetProperty("required")[1].GetString(), + }; + + Assert.Contains("Explanation", requiredChildProperties); + Assert.Contains("Output", requiredChildProperties); + + var itemsProperties = items.GetProperty("properties"); + + Assert.Equal("string", itemsProperties.GetProperty("Explanation").GetProperty("type").GetString()); + Assert.Equal("string", itemsProperties.GetProperty("Output").GetProperty("type").GetString()); + } + + [Theory] + [InlineData(typeof(TestStruct))] + [InlineData(typeof(TestStruct?))] + public async Task GetChatMessageContentsSendsValidJsonSchemaWithStruct(Type responseFormatType) + { + // Arrange + var executionSettings = new OpenAIPromptExecutionSettings { ResponseFormat = responseFormatType }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) + }; + + var sut = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + + // Act + 
await sut.GetChatMessageContentsAsync(this._chatHistoryForTest, executionSettings); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var requestJsonElement = JsonSerializer.Deserialize(actualRequestContent); + var requestResponseFormat = requestJsonElement.GetProperty("response_format"); + + Assert.Equal("json_schema", requestResponseFormat.GetProperty("type").GetString()); + Assert.Equal("TestStruct", requestResponseFormat.GetProperty("json_schema").GetProperty("name").GetString()); + Assert.True(requestResponseFormat.GetProperty("json_schema").GetProperty("strict").GetBoolean()); + + var schema = requestResponseFormat.GetProperty("json_schema").GetProperty("schema"); + + Assert.Equal("object", schema.GetProperty("type").GetString()); + Assert.False(schema.GetProperty("additionalProperties").GetBoolean()); + Assert.Equal(2, schema.GetProperty("required").GetArrayLength()); + + var requiredParentProperties = new List + { + schema.GetProperty("required")[0].GetString(), + schema.GetProperty("required")[1].GetString(), + }; + + Assert.Contains("TextProperty", requiredParentProperties); + Assert.Contains("NumericProperty", requiredParentProperties); + } + + [Fact] + public async Task GetChatMessageContentReturnsRefusal() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("TestData/chat_completion_refusal_test_response.json")) + }; + + var sut = new OpenAIChatCompletionService("model-id", "api-key", httpClient: this._httpClient); + + // Act + var content = await sut.GetChatMessageContentAsync(this._chatHistoryForTest); + + // Assert + var refusal = content.Metadata?["Refusal"] as string; + + Assert.NotNull(refusal); + Assert.Equal("I'm sorry, I cannot assist with that request.", refusal); + } + + [Fact] + public async Task 
GetStreamingChatMessageContentsReturnsRefusal() + { + // Arrange + var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); + using var stream = File.OpenRead("TestData/chat_completion_streaming_refusal_test_response.txt"); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var enumerator = service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator(); + + await enumerator.MoveNextAsync(); + + // Assert + var refusalUpdate = enumerator.Current.Metadata?["RefusalUpdate"] as string; + + Assert.NotNull(refusalUpdate); + Assert.Equal("I'm sorry, I cannot assist with that request.", refusalUpdate); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + using var response = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(response); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, 
optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("auto", optionsJson.GetProperty("tool_choice").ToString()); + } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNoneFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + using var response = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(response); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("none", optionsJson.GetProperty("tool_choice").ToString()); 
+ } + + [Fact] + public async Task ItCreatesCorrectFunctionToolCallsWhenUsingRequiredFunctionChoiceBehaviorAsync() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("TimePlugin", [ + KernelFunctionFactory.CreateFromMethod(() => { }, "Date"), + KernelFunctionFactory.CreateFromMethod(() => { }, "Now") + ]); + + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + using var response = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(response); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required() }; + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); + Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); + Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); + + Assert.Equal("required", optionsJson.GetProperty("tool_choice").ToString()); + } + + [Fact] + public async Task ItDoesNotChangeDefaultsForToolsAndChoiceIfNeitherOfFunctionCallingConfigurationsSetAsync() + { + // Arrange + var kernel = new Kernel(); + + var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); + + using var response = new 
HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) }; + this._messageHandlerStub.ResponseQueue.Enqueue(response); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Fake prompt"); + + var executionSettings = new OpenAIPromptExecutionSettings(); // Neither ToolCallBehavior nor FunctionChoiceBehavior is set. + + // Act + await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel); + + // Assert + var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + Assert.NotNull(actualRequestContent); + + var optionsJson = JsonSerializer.Deserialize(actualRequestContent); + Assert.False(optionsJson.TryGetProperty("tools", out var _)); + Assert.False(optionsJson.TryGetProperty("tool_choice", out var _)); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + this._multiMessageHandlerStub.Dispose(); + } + + private sealed class AutoFunctionInvocationFilter : IAutoFunctionInvocationFilter + { + private readonly Func, Task> _callback; + + public AutoFunctionInvocationFilter(Func, Task> callback) + { + Verify.NotNull(callback, nameof(callback)); + this._callback = callback; + } + + public AutoFunctionInvocationFilter(Action> callback) + { + Verify.NotNull(callback, nameof(callback)); + this._callback = (c, n) => { callback(c, n); return Task.CompletedTask; }; + } + + public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + await this._callback(context, next); + } + } + + private const string ChatCompletionResponse = """ + { + "id": "chatcmpl-8IlRBQU929ym1EqAY2J4T7GGkW5Om", + "object": "chat.completion", + "created": 1699482945, + "model": "gpt-3.5-turbo", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls":[{ + "id": "1", + "type": "function", + "function": { + "name": 
"TimePlugin-Date", + "arguments": "{}" + } + } + ] + }, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 52, + "completion_tokens": 1, + "total_tokens": 53 + } + } + """; + +#pragma warning disable CS8618, CA1812 + private sealed class MathReasoning + { + public List Steps { get; set; } + + public string FinalAnswer { get; set; } + } + + private sealed class MathReasoningStep + { + public string Explanation { get; set; } + + public string Output { get; set; } + } + + private struct TestStruct + { + public string TextProperty { get; set; } + + public int? NumericProperty { get; set; } + } +#pragma warning restore CS8618, CA1812 +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs index b9619fc1bc58..c763e729e381 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/Files/OpenAIFileServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIFileServiceTests.cs @@ -12,11 +12,12 @@ using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.Files; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Files; /// /// Unit tests for class. 
/// +[Obsolete("This class is deprecated and will be removed in a future version.")] public sealed class OpenAIFileServiceTests : IDisposable { private readonly HttpMessageHandlerStub _messageHandlerStub; diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs new file mode 100644 index 000000000000..559de48519b3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextEmbeddingGenerationServiceTests.cs @@ -0,0 +1,134 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Moq; +using OpenAI; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; + +/// +/// Unit tests for class. 
+/// +public class OpenAITextEmbeddingGenerationServiceTests +{ + [Fact] + public void ItCanBeInstantiatedAndPropertiesSetAsExpected() + { + // Arrange + var sut = new OpenAITextEmbeddingGenerationService("model", "apiKey", dimensions: 2); + var sutWithOpenAIClient = new OpenAITextEmbeddingGenerationService("model", new OpenAIClient(new ApiKeyCredential("apiKey")), dimensions: 2); + + // Assert + Assert.NotNull(sut); + Assert.NotNull(sutWithOpenAIClient); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + Assert.Equal("model", sutWithOpenAIClient.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(" ", "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(" ", openAIClient: new(new ApiKeyCredential("apikey")))); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService("", "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService("", openAIClient: new(new ApiKeyCredential("apikey")))); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(null!, "apikey")); + Assert.Throws(() => new OpenAITextEmbeddingGenerationService(null!, openAIClient: new(new ApiKeyCredential("apikey")))); + } + + [Fact] + public async Task ItGetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsEmpty() + { + // Arrange + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey"); + + // Act + var result = await sut.GenerateEmbeddingsAsync([], null, CancellationToken.None); + + // Assert + Assert.Empty(result); + } + + [Fact] + public async Task GetEmbeddingsAsyncReturnsEmptyWhenProvidedDataIsWhitespace() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = 
new(handler); + + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client); + + // Act + var result = await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None); + + // Assert + Assert.Single(result); + Assert.Equal(4, result[0].Length); + } + + [Fact] + public async Task ItThrowsIfNumberOfResultsDiffersFromInputsAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-multiple-response.txt")) + } + }; + using HttpClient client = new(handler); + + var sut = new OpenAITextEmbeddingGenerationService("model", "apikey", httpClient: client); + + // Act & Assert + await Assert.ThrowsAsync(async () => await sut.GenerateEmbeddingsAsync(["test"], null, CancellationToken.None)); + } + + [Fact] + public async Task GetEmbeddingsDoesLogActionAsync() + { + // Arrange + using HttpMessageHandlerStub handler = new() + { + ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-embeddings-response.txt")) + } + }; + using HttpClient client = new(handler); + + var modelId = "dall-e-2"; + var logger = new Mock>(); + logger.Setup(l => l.IsEnabled(It.IsAny())).Returns(true); + + var mockLoggerFactory = new Mock(); + mockLoggerFactory.Setup(x => x.CreateLogger(It.IsAny())).Returns(logger.Object); + + var sut = new OpenAITextEmbeddingGenerationService(modelId, "apiKey", httpClient: client, loggerFactory: mockLoggerFactory.Object); + + // Act + await sut.GenerateEmbeddingsAsync(["description"]); + + // Assert + logger.VerifyLog(LogLevel.Information, $"Action: {nameof(OpenAITextEmbeddingGenerationService.GenerateEmbeddingsAsync)}. 
OpenAI Model ID: {modelId}.", Times.Once()); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs similarity index 53% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs index 588616f54348..e20d28385293 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioServiceTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToAudioServiceTests.cs @@ -5,13 +5,14 @@ using System.Linq; using System.Net; using System.Net.Http; +using System.Text; using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.Connectors.OpenAI; using Moq; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; /// /// Unit tests for class. 
@@ -42,6 +43,16 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) // Assert Assert.NotNull(service); Assert.Equal("model-id", service.Attributes["ModelId"]); + Assert.Equal("Organization", OpenAITextToAudioService.OrganizationKey); + } + + [Fact] + public void ItThrowsIfModelIdIsNotProvided() + { + // Act & Assert + Assert.Throws(() => new OpenAITextToAudioService(" ", "apikey")); + Assert.Throws(() => new OpenAITextToAudioService("", "apikey")); + Assert.Throws(() => new OpenAITextToAudioService(null!, "apikey")); } [Theory] @@ -50,7 +61,7 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT { // Arrange var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); + using var stream = new MemoryStream([0x00, 0x00, 0xFF, 0x7F]); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -58,7 +69,7 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT }; // Act - var exception = await Record.ExceptionAsync(() => service.GetAudioContentsAsync("Some text", settings)); + var exception = await Assert.ThrowsAnyAsync(async () => await service.GetAudioContentsAsync("Some text", settings)); // Assert Assert.NotNull(exception); @@ -69,10 +80,10 @@ public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAIT public async Task GetAudioContentByDefaultWorksCorrectlyAsync() { // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); + using var stream = new MemoryStream(expectedByteArray); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ 
-80,7 +91,7 @@ public async Task GetAudioContentByDefaultWorksCorrectlyAsync() }; // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); + var result = await service.GetAudioContentsAsync("Some text"); // Assert var audioData = result[0].Data!.Value; @@ -88,13 +99,68 @@ public async Task GetAudioContentByDefaultWorksCorrectlyAsync() Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); } + [Theory] + [InlineData("echo", "wav")] + [InlineData("fable", "opus")] + [InlineData("onyx", "flac")] + [InlineData("nova", "aac")] + [InlineData("shimmer", "pcm")] + public async Task GetAudioContentVoicesWorksCorrectlyAsync(string voice, string format) + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + using var stream = new MemoryStream(expectedByteArray); + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StreamContent(stream) + }; + + // Act + var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings(voice) { ResponseFormat = format }); + + // Assert + var requestBody = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); + var audioData = result[0].Data!.Value; + Assert.Contains($"\"voice\":\"{voice}\"", requestBody); + Assert.Contains($"\"response_format\":\"{format}\"", requestBody); + Assert.False(audioData.IsEmpty); + Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); + } + + [Fact] + public async Task GetAudioContentThrowsWhenVoiceIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new 
OpenAITextToAudioExecutionSettings("voice"))); + } + + [Fact] + public async Task GetAudioContentThrowsWhenFormatIsNotSupportedAsync() + { + // Arrange + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; + + var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); + + // Act & Assert + await Assert.ThrowsAsync(async () => await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings() { ResponseFormat = "not supported" })); + } + [Theory] [InlineData(true, "http://local-endpoint")] [InlineData(false, "https://api.openai.com")] public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) { // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; + byte[] expectedByteArray = [0x00, 0x00, 0xFF, 0x7F]; if (useHttpClientBaseAddress) { @@ -102,7 +168,7 @@ public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAdd } var service = new OpenAITextToAudioService("model-id", "api-key", "organization", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); + using var stream = new MemoryStream(expectedByteArray); this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { @@ -110,7 +176,7 @@ public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAdd }; // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); + var result = await service.GetAudioContentsAsync("Some text"); // Assert Assert.StartsWith(expectedBaseAddress, this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); @@ -124,6 +190,6 @@ public void Dispose() public static TheoryData ExecutionSettings => new() { - { new OpenAITextToAudioExecutionSettings(""), typeof(ArgumentException) }, + { new OpenAITextToAudioExecutionSettings("invalid"), typeof(NotSupportedException) }, }; } diff 
--git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs new file mode 100644 index 000000000000..bd40dfe997a3 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAITextToImageServiceTests.cs @@ -0,0 +1,260 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Services; +using Microsoft.SemanticKernel.TextToImage; +using Moq; +using OpenAI.Images; +using Xunit; + +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Services; + +/// +/// Unit tests for class. +/// +public sealed class OpenAITextToImageServiceTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub; + private readonly HttpClient _httpClient; + private readonly Mock _mockLoggerFactory; + + public OpenAITextToImageServiceTests() + { + this._messageHandlerStub = new() + { + ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-response.json")) + } + }; + this._httpClient = new HttpClient(this._messageHandlerStub, false); + this._mockLoggerFactory = new Mock(); + } + + [Fact] + public void ConstructorWorksCorrectly() + { + // Arrange & Act + var sut = new OpenAITextToImageService("apiKey", "organization", "model"); + + // Assert + Assert.NotNull(sut); + Assert.Equal("organization", sut.Attributes[ClientCore.OrganizationKey]); + Assert.Equal("model", sut.Attributes[AIServiceExtensions.ModelIdKey]); + } + + [Theory] + [InlineData(256, 256, "dall-e-2")] + [InlineData(512, 512, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-2")] + [InlineData(1024, 1024, "dall-e-3")] + [InlineData(1024, 1792, 
"dall-e-3")] + [InlineData(1792, 1024, "dall-e-3")] + [InlineData(123, 321, "custom-model-1")] + [InlineData(179, 124, "custom-model-2")] + public async Task GenerateImageWorksCorrectlyAsync(int width, int height, string modelId) + { + // Arrange + var sut = new OpenAITextToImageService("api-key", modelId: modelId, httpClient: this._httpClient); + Assert.Equal(modelId, sut.Attributes["ModelId"]); + + // Act + var result = await sut.GenerateImageAsync("description", width, height); + + // Assert + Assert.Equal("https://image-url/", result); + } + + [Theory] + [InlineData(null, null)] + [InlineData("uri", "url")] + [InlineData("url", "url")] + [InlineData("GeneratedImage.Uri", "url")] + [InlineData("bytes", "b64_json")] + [InlineData("b64_json", "b64_json")] + [InlineData("GeneratedImage.Bytes", "b64_json")] + public async Task GetUriImageContentsResponseFormatRequestWorksCorrectlyAsync(string? responseFormatOption, string? expectedResponseFormat) + { + // Arrange + object? responseFormatObject = null; + + switch (responseFormatOption) + { + case "GeneratedImage.Uri": responseFormatObject = GeneratedImageFormat.Uri; break; + case "GeneratedImage.Bytes": responseFormatObject = GeneratedImageFormat.Bytes; break; + default: responseFormatObject = responseFormatOption; break; + } + + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { ResponseFormat = responseFormatObject }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedResponseFormat is not null) + { + Assert.Contains($"\"response_format\":\"{expectedResponseFormat}\"", requestBody); + } + else + { + // Then no response format is provided, it should not be included in the request body + 
Assert.DoesNotContain("response_format", requestBody); + } + } + + [Theory] + [InlineData(null, null)] + [InlineData("hd", "hd")] + [InlineData("high", "hd")] + [InlineData("standard", "standard")] + public async Task GetUriImageContentsImageQualityRequestWorksCorrectlyAsync(string? quality, string? expectedQuality) + { + // Arrange + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { Quality = quality }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedQuality is not null) + { + Assert.Contains($"\"quality\":\"{expectedQuality}\"", requestBody); + } + else + { + // Then no quality is provided, it should not be included in the request body + Assert.DoesNotContain("quality", requestBody); + } + } + + [Theory] + [InlineData(null, null)] + [InlineData("vivid", "vivid")] + [InlineData("natural", "natural")] + public async Task GetUriImageContentsImageStyleRequestWorksCorrectlyAsync(string? style, string? 
expectedStyle) + { + // Arrange + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { Style = style }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedStyle is not null) + { + Assert.Contains($"\"style\":\"{expectedStyle}\"", requestBody); + } + else + { + // Then no style is provided, it should not be included in the request body + Assert.DoesNotContain("style", requestBody); + } + } + + [Theory] + [InlineData(null, null, null)] + [InlineData(1, 2, "1x2")] + public async Task GetUriImageContentsImageSizeRequestWorksCorrectlyAsync(int? width, int? height, string? expectedSize) + { + // Arrange + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings + { + Size = width.HasValue && height.HasValue + ? 
(width.Value, height.Value) + : null + }); + + // Assert + Assert.NotNull(result); + Assert.NotNull(this._messageHandlerStub.RequestContent); + + var requestBody = UTF8Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent); + if (expectedSize is not null) + { + Assert.Contains($"\"size\":\"{expectedSize}\"", requestBody); + } + else + { + // Then no size is provided, it should not be included in the request body + Assert.DoesNotContain("size", requestBody); + } + } + + [Fact] + public async Task GetByteImageContentsResponseWorksCorrectlyAsync() + { + // Arrange + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(File.ReadAllText("./TestData/text-to-image-b64_json-format-response.json")) + }; + + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { ResponseFormat = "b64_json" }); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + var imageContent = result[0]; + Assert.NotNull(imageContent); + Assert.True(imageContent.CanRead); + Assert.Equal("image/png", imageContent.MimeType); + Assert.NotNull(imageContent.InnerContent); + Assert.IsType(imageContent.InnerContent); + + var breakingGlass = imageContent.InnerContent as GeneratedImage; + Assert.Equal("my prompt", breakingGlass!.RevisedPrompt); + } + + [Fact] + public async Task GetUrlImageContentsResponseWorksCorrectlyAsync() + { + // Arrange + var sut = new OpenAITextToImageService("api-key", httpClient: this._httpClient); + + // Act + var result = await sut.GetImageContentsAsync("my prompt", new OpenAITextToImageExecutionSettings { ResponseFormat = "url" }); + + // Assert + Assert.NotNull(result); + Assert.Single(result); + var imageContent = result[0]; + Assert.NotNull(imageContent); + Assert.False(imageContent.CanRead); + Assert.Equal(new Uri("https://image-url/"), 
imageContent.Uri); + Assert.NotNull(imageContent.InnerContent); + Assert.IsType(imageContent.InnerContent); + + var breakingGlass = imageContent.InnerContent as GeneratedImage; + Assert.Equal("my prompt", breakingGlass!.RevisedPrompt); + } + + public void Dispose() + { + this._httpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs similarity index 90% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs index 96dd9c1a290b..66390ddfd94d 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/OpenAIAudioToTextExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIAudioToTextExecutionSettingsTests.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Settings; /// /// Unit tests for class. 
@@ -28,7 +28,7 @@ public void ItReturnsValidOpenAIAudioToTextExecutionSettings() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "srt", Temperature = 0.2f }; @@ -49,7 +49,7 @@ public void ItCreatesOpenAIAudioToTextExecutionSettingsFromJson() "language": "en", "filename": "file.mp3", "prompt": "prompt", - "response_format": "text", + "response_format": "verbose_json", "temperature": 0.2 } """; @@ -65,7 +65,7 @@ public void ItCreatesOpenAIAudioToTextExecutionSettingsFromJson() Assert.Equal("en", settings.Language); Assert.Equal("file.mp3", settings.Filename); Assert.Equal("prompt", settings.Prompt); - Assert.Equal("text", settings.ResponseFormat); + Assert.Equal("verbose_json", settings.ResponseFormat); Assert.Equal(0.2f, settings.Temperature); } @@ -77,7 +77,7 @@ public void ItClonesAllProperties() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "json", Temperature = 0.2f, Filename = "something.mp3", }; @@ -88,7 +88,7 @@ public void ItClonesAllProperties() Assert.Equal("model_id", clone.ModelId); Assert.Equal("en", clone.Language); Assert.Equal("prompt", clone.Prompt); - Assert.Equal("text", clone.ResponseFormat); + Assert.Equal("json", clone.ResponseFormat); Assert.Equal(0.2f, clone.Temperature); Assert.Equal("something.mp3", clone.Filename); } @@ -101,7 +101,7 @@ public void ItFreezesAndPreventsMutation() ModelId = "model_id", Language = "en", Prompt = "prompt", - ResponseFormat = "text", + ResponseFormat = "vtt", Temperature = 0.2f, Filename = "something.mp3", }; @@ -112,7 +112,7 @@ public void ItFreezesAndPreventsMutation() Assert.Throws(() => settings.ModelId = "new_model"); Assert.Throws(() => settings.Language = "some_format"); Assert.Throws(() => settings.Prompt = "prompt"); - Assert.Throws(() => settings.ResponseFormat = "something"); + Assert.Throws(() => settings.ResponseFormat = "vtt"); Assert.Throws(() => settings.Temperature = 0.2f); 
Assert.Throws(() => settings.Filename = "something"); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs similarity index 87% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index b64649230d96..567c77babeea 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -7,10 +7,10 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Settings; /// -/// Unit tests of OpenAIPromptExecutionSettings +/// Unit tests of OpenAIPromptExecutionSettings /// public class OpenAIPromptExecutionSettingsTests { @@ -23,16 +23,14 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults() // Assert Assert.NotNull(executionSettings); - Assert.Equal(1, executionSettings.Temperature); - Assert.Equal(1, executionSettings.TopP); - Assert.Equal(0, executionSettings.FrequencyPenalty); - Assert.Equal(0, executionSettings.PresencePenalty); - Assert.Equal(1, executionSettings.ResultsPerPrompt); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); + Assert.Null(executionSettings.FrequencyPenalty); + Assert.Null(executionSettings.PresencePenalty); Assert.Null(executionSettings.StopSequences); Assert.Null(executionSettings.TokenSelectionBiases); Assert.Null(executionSettings.TopLogprobs); Assert.Null(executionSettings.Logprobs); - Assert.Null(executionSettings.AzureChatExtensionsOptions); Assert.Equal(128, executionSettings.MaxTokens); } @@ -46,7 +44,6 @@ public void
ItUsesExistingOpenAIExecutionSettings() TopP = 0.7, FrequencyPenalty = 0.7, PresencePenalty = 0.7, - ResultsPerPrompt = 2, StopSequences = new string[] { "foo", "bar" }, ChatSystemPrompt = "chat system prompt", MaxTokens = 128, @@ -61,6 +58,7 @@ public void ItUsesExistingOpenAIExecutionSettings() // Assert Assert.NotNull(executionSettings); Assert.Equal(actualSettings, executionSettings); + Assert.Equal(128, executionSettings.MaxTokens); } [Fact] @@ -232,7 +230,6 @@ public void PromptExecutionSettingsFreezeWorksAsExpected() // Assert Assert.True(executionSettings.IsFrozen); Assert.Throws(() => executionSettings.ModelId = "gpt-4"); - Assert.Throws(() => executionSettings.ResultsPerPrompt = 2); Assert.Throws(() => executionSettings.Temperature = 1); Assert.Throws(() => executionSettings.TopP = 1); Assert.Throws(() => executionSettings.StopSequences?.Add("STOP")); @@ -246,14 +243,32 @@ public void PromptExecutionSettingsFreezeWorksAsExpected() public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences() { // Arrange - var executionSettings = new OpenAIPromptExecutionSettings { StopSequences = [] }; + PromptExecutionSettings settings = new OpenAIPromptExecutionSettings { StopSequences = [] }; + + // Act + var executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(settings); + + // Assert + Assert.NotNull(executionSettings.StopSequences); + Assert.Empty(executionSettings.StopSequences); + } + + [Fact] + public void ItRestoresOriginalFunctionChoiceBehavior() + { + // Arrange + var functionChoiceBehavior = FunctionChoiceBehavior.None(); + + var originalExecutionSettings = new PromptExecutionSettings + { + FunctionChoiceBehavior = functionChoiceBehavior + }; // Act -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - var executionSettingsWithData = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); -#pragma 
warning restore CS0618 + var result = OpenAIPromptExecutionSettings.FromExecutionSettings(originalExecutionSettings); + // Assert - Assert.Null(executionSettingsWithData.StopSequences); + Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior); } private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) @@ -263,7 +278,6 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut Assert.Equal(0.7, executionSettings.TopP); Assert.Equal(0.7, executionSettings.FrequencyPenalty); Assert.Equal(0.7, executionSettings.PresencePenalty); - Assert.Equal(2, executionSettings.ResultsPerPrompt); Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences); Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt); Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs index ea1b1adafae5..f30478e15acf 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/OpenAITextToAudioExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAITextToAudioExecutionSettingsTests.cs @@ -6,7 +6,7 @@ using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; +namespace SemanticKernel.Connectors.OpenAI.UnitTests.Settings; /// /// Unit tests for class.
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt new file mode 100644 index 000000000000..be41c2eaf843 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_invalid_streaming_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"logprobs":null,"finish_reason":null}]} + +data: {"id":}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json new file mode 100644 index 000000000000..737b972309ba --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_multiple_function_calls_test_response.json @@ -0,0 +1,64 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + }, + { + "id": "2", + "type": "function", + "function": { + "name": "MyPlugin-FunctionWithException", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "3", + "type": "function", + "function": { + "name": "MyPlugin-NonExistentFunction", + "arguments": "{\n\"argument\": \"value\"\n}" + } + }, + { + "id": "4", + "type": "function", + "function": { + "name": 
"MyPlugin-InvalidArguments", + "arguments": "invalid_arguments_format" + } + }, + { + "id": "5", + "type": "function", + "function": { + "name": "MyPlugin-IntArguments", + "arguments": "{\n\"age\": 36\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_refusal_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_refusal_test_response.json new file mode 100644 index 000000000000..7ed7e188feeb --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_refusal_test_response.json @@ -0,0 +1,22 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1704208954, + "model": "gpt-4", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "refusal": "I'm sorry, I cannot assist with that request." 
+ }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + }, + "system_fingerprint": null +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json new file mode 100644 index 000000000000..6c93e434f259 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_single_function_call_test_response.json @@ -0,0 +1,32 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-GetCurrentWeather", + "arguments": "{\n\"location\": \"Boston, MA\"\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..ceb8f3e8b44b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,9 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, 
MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin-NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin-InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_refusal_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_refusal_test_response.txt new file mode 100644 index 000000000000..06e07ba459dc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_refusal_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"refusal":"I'm sorry, I cannot assist with that request."},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt new file mode 100644 index 000000000000..6835039941ce --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_single_function_call_test_response.txt @@ -0,0 +1,3 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt new file mode 100644 index 000000000000..ede04c1b9199 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_test_response.txt @@ -0,0 +1,7 @@ +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{"content":"Test chat streaming response"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]} + +data: {"id":"chatcmpl-96fqQVHGjG9Yzs4ZMB1K6nfy2oEoo","object":"chat.completion.chunk","created":1711377846,"model":"gpt-4-0125-preview","system_fingerprint":"fp_a7daf7c51e","choices":[],"usage":{"prompt_tokens":13,"completion_tokens":8,"total_tokens":21,"completion_tokens_details":{"reasoning_tokens":0}}} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json new file mode 100644 index 000000000000..b601bac8b55b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_test_response.json @@ -0,0 +1,22 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1704208954, + "model": "gpt-4", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "Test chat response" + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + }, + "system_fingerprint": null +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt new file mode 100644 index 000000000000..5e17403da9fc --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_streaming_test_response.txt @@ -0,0 +1 @@ +data: {"id":"response-id","model":"","created":1684304924,"object":"chat.completion","choices":[{"index":0,"messages":[{"delta":{"role":"assistant","content":"Test chat with data streaming response"},"end_turn":false}]}]} diff 
--git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json new file mode 100644 index 000000000000..1d1d4e78b5bd --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_with_data_test_response.json @@ -0,0 +1,28 @@ +{ + "id": "response-id", + "model": "", + "created": 1684304924, + "object": "chat.completion", + "choices": [ + { + "index": 0, + "messages": [ + { + "role": "tool", + "content": "{\"citations\": [{\"content\": \"\\OpenAI AI services are cloud-based artificial intelligence (AI) services...\", \"id\": null, \"title\": \"What is OpenAI AI services\", \"filepath\": null, \"url\": null, \"metadata\": {\"chunking\": \"original document size=250. Scores=0.4314117431640625 and 1.72564697265625.Org Highlight count=4.\"}, \"chunk_id\": \"0\"}], \"intent\": \"[\\\"Learn about OpenAI AI services.\\\"]\"}", + "end_turn": false + }, + { + "role": "assistant", + "content": "Test chat with data response", + "end_turn": true + } + ] + } + ], + "usage": { + "prompt_tokens": 55, + "completion_tokens": 100, + "total_tokens": 155 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json new file mode 100644 index 000000000000..3ffa6b00cc3f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_multiple_function_calls_test_response.json @@ -0,0 +1,40 @@ +{ + "id": "response-id", + "object": "chat.completion", + "created": 1699896916, + "model": "gpt-3.5-turbo-0613", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "id": "1", + "type": "function", + "function": { + "name": "MyPlugin-Function1", + 
"arguments": "{\n\"parameter\": \"function1-value\"\n}" + } + }, + { + "id": "2", + "type": "function", + "function": { + "name": "MyPlugin-Function2", + "arguments": "{\n\"parameter\": \"function2-value\"\n}" + } + } + ] + }, + "logprobs": null, + "finish_reason": "tool_calls" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 17, + "total_tokens": 99 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt new file mode 100644 index 000000000000..c8aeb98e8b82 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_streaming_multiple_function_calls_test_response.txt @@ -0,0 +1,5 @@ +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} + +data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt new file mode 100644 index 000000000000..46a9581cf0cc --- /dev/null +++ 
b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-multiple-response.txt @@ -0,0 +1,20 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + }, + { + "object": "embedding", + "index": 1, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt new file mode 100644 index 000000000000..c715b851b78c --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-embeddings-response.txt @@ -0,0 +1,15 @@ +{ + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": "zcyMP83MDEAzM1NAzcyMQA==" + } + ], + "model": "text-embedding-ada-002", + "usage": { + "prompt_tokens": 7, + "total_tokens": 7 + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json new file mode 100644 index 000000000000..e004607fa8f0 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-b64_json-format-response.json @@ -0,0 +1,9 @@ +{ + "created": 1726234481, + "data": [ + { + "b64_json": "iVBORw0KGgoAAA==", + "revised_prompt": "my prompt" + } + ] +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.json new file mode 100644 index 000000000000..db96aba8f869 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/text-to-image-response.json @@ -0,0 +1,9 @@ +{ + "created": 1702575371, + "data": [ + { + "revised_prompt": 
"my prompt", + "url": "https://image-url/" + } + ] +} \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs similarity index 73% rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs rename to dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs index d39480ebfe8d..4bcbeaface05 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ToolCallBehaviorTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/ToolCallBehaviorTests.cs @@ -2,13 +2,13 @@ using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; using Xunit; using static Microsoft.SemanticKernel.Connectors.OpenAI.ToolCallBehavior; -namespace SemanticKernel.Connectors.UnitTests.OpenAI; +namespace SemanticKernel.Connectors.OpenAI.UnitTests; /// /// Unit tests for @@ -24,6 +24,7 @@ public void EnableKernelFunctionsReturnsCorrectKernelFunctionsInstance() // Assert Assert.IsType(behavior); Assert.Equal(0, behavior.MaximumAutoInvokeAttempts); + Assert.Equal($"{nameof(KernelFunctions)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -47,6 +48,7 @@ public void EnableFunctionsReturnsEnabledFunctionsInstance() // Assert Assert.IsType(behavior); + Assert.Contains($"{nameof(EnabledFunctions)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -57,6 +59,7 @@ public void RequireFunctionReturnsRequiredFunctionInstance() // Assert Assert.IsType(behavior); + Assert.Contains($"{nameof(RequiredFunction)}(autoInvoke:{behavior.MaximumAutoInvokeAttempts != 0})", behavior.ToString()); } [Fact] @@ -64,13 +67,13 @@ public void KernelFunctionsConfigureOptionsWithNullKernelDoesNotAddTools() { // Arrange var kernelFunctions = new 
KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act - kernelFunctions.ConfigureOptions(null, chatCompletionsOptions); + var options = kernelFunctions.ConfigureOptions(null); // Assert - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -78,15 +81,14 @@ public void KernelFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() { // Arrange var kernelFunctions = new KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act - kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = kernelFunctions.ConfigureOptions(kernel); // Assert - Assert.Null(chatCompletionsOptions.ToolChoice); - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -94,7 +96,6 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools() { // Arrange var kernelFunctions = new KernelFunctions(autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); var plugin = this.GetTestPlugin(); @@ -102,12 +103,11 @@ public void KernelFunctionsConfigureOptionsWithFunctionsAddsTools() kernel.Plugins.Add(plugin); // Act - kernelFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = kernelFunctions.ConfigureOptions(kernel); // Assert - Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); - - this.AssertTools(chatCompletionsOptions); + Assert.NotNull(options.Choice); + this.AssertTools(options.Tools); } [Fact] @@ -115,14 +115,13 @@ public void EnabledFunctionsConfigureOptionsWithoutFunctionsDoesNotAddTools() { // Arrange var enabledFunctions = new EnabledFunctions([], autoInvoke: false); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act - enabledFunctions.ConfigureOptions(null, 
chatCompletionsOptions); + var options = enabledFunctions.ConfigureOptions(null); // Assert - Assert.Null(chatCompletionsOptions.ToolChoice); - Assert.Empty(chatCompletionsOptions.Tools); + Assert.Null(options.Choice); + Assert.Null(options.Tools); } [Fact] @@ -131,10 +130,9 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsExc // Arrange var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act & Assert - var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(null, chatCompletionsOptions)); + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(null)); Assert.Equal($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided.", exception.Message); } @@ -144,11 +142,10 @@ public void EnabledFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsEx // Arrange var functions = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act & Assert - var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions)); + var exception = Assert.Throws(() => enabledFunctions.ConfigureOptions(kernel)); Assert.Equal($"The specified {nameof(EnabledFunctions)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message); } @@ -161,18 +158,16 @@ public void EnabledFunctionsConfigureOptionsWithKernelAndPluginsAddsTools(bool a var plugin = this.GetTestPlugin(); var functions = plugin.GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()); var enabledFunctions = new EnabledFunctions(functions, autoInvoke); - 
var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); kernel.Plugins.Add(plugin); // Act - enabledFunctions.ConfigureOptions(kernel, chatCompletionsOptions); + var options = enabledFunctions.ConfigureOptions(kernel); // Assert - Assert.Equal(ChatCompletionsToolChoice.Auto, chatCompletionsOptions.ToolChoice); - - this.AssertTools(chatCompletionsOptions); + Assert.NotNull(options.Choice); + this.AssertTools(options.Tools); } [Fact] @@ -181,10 +176,9 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndNullKernelThrowsEx // Arrange var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()).First(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); // Act & Assert - var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(null, chatCompletionsOptions)); + var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(null)); Assert.Equal($"Auto-invocation with {nameof(RequiredFunction)} is not supported when no kernel is provided.", exception.Message); } @@ -194,11 +188,10 @@ public void RequiredFunctionsConfigureOptionsWithAutoInvokeAndEmptyKernelThrowsE // Arrange var function = this.GetTestPlugin().GetFunctionsMetadata().Select(function => function.ToOpenAIFunction()).First(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); - var chatCompletionsOptions = new ChatCompletionsOptions(); var kernel = Kernel.CreateBuilder().Build(); // Act & Assert - var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions)); + var exception = Assert.Throws(() => requiredFunction.ConfigureOptions(kernel)); Assert.Equal($"The specified {nameof(RequiredFunction)} function MyPlugin-MyFunction is not available in the kernel.", exception.Message); } @@ -208,18 +201,17 @@ public void 
RequiredFunctionConfigureOptionsAddsTools() // Arrange var plugin = this.GetTestPlugin(); var function = plugin.GetFunctionsMetadata()[0].ToOpenAIFunction(); - var chatCompletionsOptions = new ChatCompletionsOptions(); var requiredFunction = new RequiredFunction(function, autoInvoke: true); var kernel = new Kernel(); kernel.Plugins.Add(plugin); // Act - requiredFunction.ConfigureOptions(kernel, chatCompletionsOptions); + var options = requiredFunction.ConfigureOptions(kernel); // Assert - Assert.NotNull(chatCompletionsOptions.ToolChoice); + Assert.NotNull(options.Choice); - this.AssertTools(chatCompletionsOptions); + this.AssertTools(options.Tools); } private KernelPlugin GetTestPlugin() @@ -234,16 +226,15 @@ private KernelPlugin GetTestPlugin() return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]); } - private void AssertTools(ChatCompletionsOptions chatCompletionsOptions) + private void AssertTools(IList? tools) { - Assert.Single(chatCompletionsOptions.Tools); - - var tool = chatCompletionsOptions.Tools[0] as ChatCompletionsFunctionToolDefinition; + Assert.NotNull(tools); + var tool = Assert.Single(tools); Assert.NotNull(tool); - Assert.Equal("MyPlugin-MyFunction", tool.Name); - Assert.Equal("Test Function", tool.Description); - Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.Parameters.ToString()); + Assert.Equal("MyPlugin-MyFunction", tool.FunctionName); + Assert.Equal("Test Function", tool.FunctionDescription); + Assert.Equal("{\"type\":\"object\",\"required\":[],\"properties\":{\"parameter1\":{\"type\":\"string\"},\"parameter2\":{\"type\":\"string\"}}}", tool.FunctionParameters.ToString()); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs deleted file mode 100644 index be0428faa799..000000000000 --- 
a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIClientCore.cs +++ /dev/null @@ -1,102 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Core implementation for Azure OpenAI clients, providing common functionality and properties. -/// -internal sealed class AzureOpenAIClientCore : ClientCore -{ - /// - /// Gets the key used to store the deployment name in the dictionary. - /// - public static string DeploymentNameKey => "DeploymentName"; - - /// - /// OpenAI / Azure OpenAI Client - /// - internal override OpenAIClient Client { get; } - - /// - /// Initializes a new instance of the class using API Key authentication. - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( - string deploymentName, - string endpoint, - string apiKey, - HttpClient? httpClient = null, - ILogger? 
logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - Verify.NotNullOrWhiteSpace(apiKey); - - var options = GetOpenAIClientOptions(httpClient); - - this.DeploymentOrModelName = deploymentName; - this.Endpoint = new Uri(endpoint); - this.Client = new OpenAIClient(this.Endpoint, new AzureKeyCredential(apiKey), options); - } - - /// - /// Initializes a new instance of the class supporting AAD authentication. - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credential, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( - string deploymentName, - string endpoint, - TokenCredential credential, - HttpClient? httpClient = null, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - - var options = GetOpenAIClientOptions(httpClient); - - this.DeploymentOrModelName = deploymentName; - this.Endpoint = new Uri(endpoint); - this.Client = new OpenAIClient(this.Endpoint, credential, options); - } - - /// - /// Initializes a new instance of the class using the specified OpenAIClient. - /// Note: instances created this way might not have the default diagnostics settings, - /// it's up to the caller to configure the client. - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . 
- /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAIClientCore( - string deploymentName, - OpenAIClient openAIClient, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNull(openAIClient); - - this.DeploymentOrModelName = deploymentName; - this.Client = openAIClient; - - this.AddAttribute(DeploymentNameKey, deploymentName); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs deleted file mode 100644 index dd02ddd0ebee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAITextToAudioClient.cs +++ /dev/null @@ -1,141 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI text-to-audio client for HTTP operations. -/// -[Experimental("SKEXP0001")] -internal sealed class AzureOpenAITextToAudioClient -{ - private readonly ILogger _logger; - private readonly HttpClient _httpClient; - - private readonly string _deploymentName; - private readonly string _endpoint; - private readonly string _apiKey; - private readonly string? _modelId; - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Creates an instance of the with API key auth. 
- /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal AzureOpenAITextToAudioClient( - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'"); - Verify.NotNullOrWhiteSpace(apiKey); - - this._deploymentName = deploymentName; - this._endpoint = endpoint; - this._apiKey = apiKey; - this._modelId = modelId; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - internal async Task> GetAudioContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - CancellationToken cancellationToken) - { - OpenAITextToAudioExecutionSettings? 
audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.NotNullOrWhiteSpace(audioExecutionSettings?.Voice); - - var modelId = this.GetModelId(audioExecutionSettings); - - using var request = this.GetRequest(text, modelId, audioExecutionSettings); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - var data = await response.Content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false); - - return [new(data, modelId)]; - } - - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - #region private - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Api-Key", this._apiKey); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureOpenAITextToAudioClient))); - - try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on text-to-audio request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private HttpRequestMessage GetRequest(string text, string modelId, OpenAITextToAudioExecutionSettings executionSettings) - { - const string DefaultApiVersion = "2024-02-15-preview"; - - var baseUrl = !string.IsNullOrWhiteSpace(this._httpClient.BaseAddress?.AbsoluteUri) ? 
- this._httpClient.BaseAddress!.AbsoluteUri : - this._endpoint; - - var requestUrl = $"openai/deployments/{this._deploymentName}/audio/speech?api-version={DefaultApiVersion}"; - - var payload = new TextToAudioRequest(modelId, text, executionSettings.Voice) - { - ResponseFormat = executionSettings.ResponseFormat, - Speed = executionSettings.Speed - }; - - return HttpRequest.CreatePostRequest($"{baseUrl.TrimEnd('/')}/{requestUrl}", payload); - } - - private string GetModelId(OpenAITextToAudioExecutionSettings executionSettings) - { - return - !string.IsNullOrWhiteSpace(this._modelId) ? this._modelId! : - !string.IsNullOrWhiteSpace(executionSettings.ModelId) ? executionSettings.ModelId! : - this._deploymentName; - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs deleted file mode 100644 index 594b420bc5f2..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContent.cs +++ /dev/null @@ -1,69 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI specialized with data chat message content -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIWithDataChatMessageContent : ChatMessageContent -{ - /// - /// Content from data source, including citations. - /// For more information see . - /// - public string? ToolContent { get; set; } - - /// - /// Initializes a new instance of the class. 
- /// - /// Azure Chat With Data Choice - /// The model ID used to generate the content - /// Additional metadata - internal AzureOpenAIWithDataChatMessageContent(ChatWithDataChoice chatChoice, string? modelId, IReadOnlyDictionary? metadata = null) - : base(default, string.Empty, modelId, chatChoice, System.Text.Encoding.UTF8, CreateMetadataDictionary(metadata)) - { - // An assistant message content must be present, otherwise the chat is not valid. - var chatMessage = chatChoice.Messages.FirstOrDefault(m => string.Equals(m.Role, AuthorRole.Assistant.Label, StringComparison.OrdinalIgnoreCase)) ?? - throw new ArgumentException("Chat is not valid. Chat message does not contain any messages with 'assistant' role."); - - this.Content = chatMessage.Content; - this.Role = new AuthorRole(chatMessage.Role); - - this.ToolContent = chatChoice.Messages.FirstOrDefault(message => message.Role.Equals(AuthorRole.Tool.Label, StringComparison.OrdinalIgnoreCase))?.Content; - ((Dictionary)this.Metadata!).Add(nameof(this.ToolContent), this.ToolContent); - } - - private static Dictionary CreateMetadataDictionary(IReadOnlyDictionary? metadata) - { - Dictionary newDictionary; - if (metadata is null) - { - // There's no existing metadata to clone; just allocate a new dictionary. - newDictionary = new Dictionary(1); - } - else if (metadata is IDictionary origMutable) - { - // Efficiently clone the old dictionary to a new one. - newDictionary = new Dictionary(origMutable); - } - else - { - // There's metadata to clone but we have to do so one item at a time. 
- newDictionary = new Dictionary(metadata.Count + 1); - foreach (var kvp in metadata) - { - newDictionary[kvp.Key] = kvp.Value; - } - } - - return newDictionary; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs deleted file mode 100644 index ebe57f446293..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContent.cs +++ /dev/null @@ -1,49 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Text; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure Open AI WithData Specialized streaming chat message content. -/// -/// -/// Represents a chat message content chunk that was streamed from the remote model. -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIWithDataStreamingChatMessageContent : StreamingChatMessageContent -{ - /// - public string? FunctionName { get; set; } - - /// - public string? FunctionArgument { get; set; } - - /// - /// Create a new instance of the class. - /// - /// Azure message update representation from WithData apis - /// Index of the choice - /// The model ID used to generate the content - /// Additional metadata - internal AzureOpenAIWithDataStreamingChatMessageContent(ChatWithDataStreamingChoice choice, int choiceIndex, string modelId, IReadOnlyDictionary? 
metadata = null) : - base(AuthorRole.Assistant, null, choice, choiceIndex, modelId, Encoding.UTF8, metadata) - { - var message = choice.Messages.FirstOrDefault(this.IsValidMessage); - var messageContent = message?.Delta?.Content; - - this.Content = messageContent; - } - - private bool IsValidMessage(ChatWithDataStreamingMessage message) - { - return !message.EndTurn && - (message.Delta.Role is null || !message.Delta.Role.Equals(AuthorRole.Tool.Label, StringComparison.Ordinal)); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs deleted file mode 100644 index 6cfcf4e3e459..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs +++ /dev/null @@ -1,1591 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Diagnostics; -using Microsoft.SemanticKernel.Http; - -#pragma warning disable CA2208 // Instantiate argument exceptions correctly - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Base class for AI clients that provides common functionality for interacting with OpenAI services. -/// -internal abstract class ClientCore -{ - private const string ModelProvider = "openai"; - private const int MaxResultsPerPrompt = 128; - - /// - /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current - /// asynchronous chain of execution. 
- /// - /// - /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that - /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, - /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close - /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. - /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in - /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that - /// prompt function could advertize itself as a candidate for auto-invocation. We don't want to outright block that, - /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent - /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made - /// configurable should need arise. - /// - private const int MaxInflightAutoInvokes = 128; - - /// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy. - private static readonly ChatCompletionsFunctionToolDefinition s_nonInvocableFunctionTool = new() { Name = "NonInvocableTool" }; - - /// Tracking for . - private static readonly AsyncLocal s_inflightAutoInvokes = new(); - - internal ClientCore(ILogger? logger = null) - { - this.Logger = logger ?? NullLogger.Instance; - } - - /// - /// Model Id or Deployment Name - /// - internal string DeploymentOrModelName { get; set; } = string.Empty; - - /// - /// OpenAI / Azure OpenAI Client - /// - internal abstract OpenAIClient Client { get; } - - internal Uri? 
Endpoint { get; set; } = null; - - /// - /// Logger instance - /// - internal ILogger Logger { get; set; } - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Instance of for metrics. - /// - private static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI"); - - /// - /// Instance of to keep track of the number of prompt tokens used. - /// - private static readonly Counter s_promptTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.prompt", - unit: "{token}", - description: "Number of prompt tokens used"); - - /// - /// Instance of to keep track of the number of completion tokens used. - /// - private static readonly Counter s_completionTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.completion", - unit: "{token}", - description: "Number of completion tokens used"); - - /// - /// Instance of to keep track of the total number of tokens used. - /// - private static readonly Counter s_totalTokensCounter = - s_meter.CreateCounter( - name: "semantic_kernel.connectors.openai.tokens.total", - unit: "{token}", - description: "Number of tokens used"); - - /// - /// Creates completions for the prompt and settings. - /// - /// The prompt to complete. - /// Execution settings for the completion API. - /// The containing services, plugins, and other state for use throughout the operation. - /// The to monitor for cancellation requests. The default is . - /// Completions generated by the remote model - internal async Task> GetTextResultsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textExecutionSettings.MaxTokens); - - var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName); - - Completions? responseData = null; - List responseContent; - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings)) - { - try - { - responseData = (await RunRequestAsync(() => this.Client.GetCompletionsAsync(options, cancellationToken)).ConfigureAwait(false)).Value; - if (responseData.Choices.Count == 0) - { - throw new KernelException("Text completions not found"); - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - if (responseData != null) - { - // Capture available metadata even if the operation failed. - activity - .SetResponseId(responseData.Id) - .SetPromptTokenUsage(responseData.Usage.PromptTokens) - .SetCompletionTokenUsage(responseData.Usage.CompletionTokens); - } - throw; - } - - responseContent = responseData.Choices.Select(choice => new TextContent(choice.Text, this.DeploymentOrModelName, choice, Encoding.UTF8, GetTextChoiceMetadata(responseData, choice))).ToList(); - activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens); - } - - this.LogUsage(responseData.Usage); - - return responseContent; - } - - internal async IAsyncEnumerable GetStreamingTextContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings textExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - ValidateMaxTokens(textExecutionSettings.MaxTokens); - - var options = CreateCompletionsOptions(prompt, textExecutionSettings, this.DeploymentOrModelName); - - using var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, prompt, textExecutionSettings); - - StreamingResponse response; - try - { - response = await RunRequestAsync(() => this.Client.GetCompletionsStreamingAsync(options, cancellationToken)).ConfigureAwait(false); - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); - List? streamedContents = activity is not null ? [] : null; - try - { - while (true) - { - try - { - if (!await responseEnumerator.MoveNextAsync()) - { - break; - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - Completions completions = responseEnumerator.Current; - foreach (Choice choice in completions.Choices) - { - var openAIStreamingTextContent = new OpenAIStreamingTextContent( - choice.Text, choice.Index, this.DeploymentOrModelName, choice, GetTextChoiceMetadata(completions, choice)); - streamedContents?.Add(openAIStreamingTextContent); - yield return openAIStreamingTextContent; - } - } - } - finally - { - activity?.EndStreaming(streamedContents); - await responseEnumerator.DisposeAsync(); - } - } - - private static Dictionary GetTextChoiceMetadata(Completions completions, Choice choice) - { - return new Dictionary(8) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.PromptFilterResults), 
completions.PromptFilterResults }, - { nameof(completions.Usage), completions.Usage }, - { nameof(choice.ContentFilterResults), choice.ContentFilterResults }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. - { nameof(choice.FinishReason), choice.FinishReason?.ToString() }, - - { nameof(choice.LogProbabilityModel), choice.LogProbabilityModel }, - { nameof(choice.Index), choice.Index }, - }; - } - - private static Dictionary GetChatChoiceMetadata(ChatCompletions completions, ChatChoice chatChoice) - { - return new Dictionary(12) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.PromptFilterResults), completions.PromptFilterResults }, - { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, - { nameof(completions.Usage), completions.Usage }, - { nameof(chatChoice.ContentFilterResults), chatChoice.ContentFilterResults }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. - { nameof(chatChoice.FinishReason), chatChoice.FinishReason?.ToString() }, - - { nameof(chatChoice.FinishDetails), chatChoice.FinishDetails }, - { nameof(chatChoice.LogProbabilityInfo), chatChoice.LogProbabilityInfo }, - { nameof(chatChoice.Index), chatChoice.Index }, - { nameof(chatChoice.Enhancements), chatChoice.Enhancements }, - }; - } - - private static Dictionary GetResponseMetadata(StreamingChatCompletionsUpdate completions) - { - return new Dictionary(4) - { - { nameof(completions.Id), completions.Id }, - { nameof(completions.Created), completions.Created }, - { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, - - // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. 
- { nameof(completions.FinishReason), completions.FinishReason?.ToString() }, - }; - } - - private static Dictionary GetResponseMetadata(AudioTranscription audioTranscription) - { - return new Dictionary(3) - { - { nameof(audioTranscription.Language), audioTranscription.Language }, - { nameof(audioTranscription.Duration), audioTranscription.Duration }, - { nameof(audioTranscription.Segments), audioTranscription.Segments } - }; - } - - /// - /// Generates an embedding from the given . - /// - /// List of strings to generate embeddings for - /// The containing services, plugins, and other state for use throughout the operation. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The to monitor for cancellation requests. The default is . - /// List of embeddings - internal async Task>> GetEmbeddingsAsync( - IList data, - Kernel? kernel, - int? dimensions, - CancellationToken cancellationToken) - { - var result = new List>(data.Count); - - if (data.Count > 0) - { - var embeddingsOptions = new EmbeddingsOptions(this.DeploymentOrModelName, data) - { - Dimensions = dimensions - }; - - var response = await RunRequestAsync(() => this.Client.GetEmbeddingsAsync(embeddingsOptions, cancellationToken)).ConfigureAwait(false); - var embeddings = response.Value.Data; - - if (embeddings.Count != data.Count) - { - throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}"); - } - - for (var i = 0; i < embeddings.Count; i++) - { - result.Add(embeddings[i].Embedding); - } - } - - return result; - } - - internal async Task> GetTextContentFromAudioAsync( - AudioContent content, - PromptExecutionSettings? 
executionSettings, - CancellationToken cancellationToken) - { - Verify.NotNull(content.Data); - var audioData = content.Data.Value; - if (audioData.IsEmpty) - { - throw new ArgumentException("Audio data cannot be empty", nameof(content)); - } - - OpenAIAudioToTextExecutionSettings? audioExecutionSettings = OpenAIAudioToTextExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.ValidFilename(audioExecutionSettings?.Filename); - - var audioOptions = new AudioTranscriptionOptions - { - AudioData = BinaryData.FromBytes(audioData), - DeploymentName = this.DeploymentOrModelName, - Filename = audioExecutionSettings.Filename, - Language = audioExecutionSettings.Language, - Prompt = audioExecutionSettings.Prompt, - ResponseFormat = audioExecutionSettings.ResponseFormat, - Temperature = audioExecutionSettings.Temperature - }; - - AudioTranscription responseData = (await RunRequestAsync(() => this.Client.GetAudioTranscriptionAsync(audioOptions, cancellationToken)).ConfigureAwait(false)).Value; - - return [new(responseData.Text, this.DeploymentOrModelName, metadata: GetResponseMetadata(responseData))]; - } - - /// - /// Generate a new chat message - /// - /// Chat history - /// Execution settings for the completion API. - /// The containing services, plugins, and other state for use throughout the operation. - /// Async cancellation token - /// Generated chat message in string format - internal async Task> GetChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - // Convert the incoming execution settings to OpenAI settings. 
- OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; - ValidateMaxTokens(chatExecutionSettings.MaxTokens); - ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); - - // Create the Azure SDK ChatCompletionOptions instance from all available information. - var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); - - for (int requestIndex = 1; ; requestIndex++) - { - // Make the request. - ChatCompletions? responseData = null; - List responseContent; - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings)) - { - try - { - responseData = (await RunRequestAsync(() => this.Client.GetChatCompletionsAsync(chatOptions, cancellationToken)).ConfigureAwait(false)).Value; - this.LogUsage(responseData.Usage); - if (responseData.Choices.Count == 0) - { - throw new KernelException("Chat completions not found"); - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - if (responseData != null) - { - // Capture available metadata even if the operation failed. - activity - .SetResponseId(responseData.Id) - .SetPromptTokenUsage(responseData.Usage.PromptTokens) - .SetCompletionTokenUsage(responseData.Usage.CompletionTokens); - } - throw; - } - - responseContent = responseData.Choices.Select(chatChoice => this.GetChatMessage(chatChoice, responseData)).ToList(); - activity?.SetCompletionResponse(responseContent, responseData.Usage.PromptTokens, responseData.Usage.CompletionTokens); - } - - // If we don't want to attempt to invoke any functions, just return the result. 
- // Or if we are auto-invoking but we somehow end up with other than 1 choice even though only 1 was requested, similarly bail. - if (!autoInvoke || responseData.Choices.Count != 1) - { - return responseContent; - } - - Debug.Assert(kernel is not null); - - // Get our single result and extract the function call information. If this isn't a function call, or if it is - // but we're unable to find the function or extract the relevant information, just return the single result. - // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service - // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool - // is specified. - ChatChoice resultChoice = responseData.Choices[0]; - OpenAIChatMessageContent result = this.GetChatMessage(resultChoice, responseData); - if (result.ToolCalls.Count == 0) - { - return [result]; - } - - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Tool requests: {Requests}", result.ToolCalls.Count); - } - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", result.ToolCalls.OfType().Select(ftc => $"{ftc.Name}({ftc.Arguments})"))); - } - - // Add the original assistant message to the chatOptions; this is required for the service - // to understand the tool call responses. Also add the result message to the caller's chat - // history: if they don't want it, they can remove it, but this makes the data available, - // including metadata like usage. - chatOptions.Messages.Add(GetRequestMessage(resultChoice.Message)); - chat.Add(result); - - // We must send back a response for every tool call, regardless of whether we successfully executed it or not. - // If we successfully execute it, we'll add the result. If we don't, we'll add an error. 
- for (int toolCallIndex = 0; toolCallIndex < result.ToolCalls.Count; toolCallIndex++) - { - ChatCompletionsToolCall toolCall = result.ToolCalls[toolCallIndex]; - - // We currently only know about function tool calls. If it's anything else, we'll respond with an error. - if (toolCall is not ChatCompletionsFunctionToolCall functionToolCall) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger); - continue; - } - - // Parse the function call arguments. - OpenAIFunctionToolCall? openAIFunctionToolCall; - try - { - openAIFunctionToolCall = new(functionToolCall); - } - catch (JsonException) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger); - continue; - } - - // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, - // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able - // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. - if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && - !IsRequestableTool(chatOptions, openAIFunctionToolCall)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger); - continue; - } - - // Find the function in the kernel and populate the arguments. - if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger); - continue; - } - - // Now, invoke the function, and add the resulting tool call message to the chat options. 
- FunctionResult functionResult = new(function) { Culture = kernel.Culture }; - AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, result) - { - ToolCallId = toolCall.Id, - Arguments = functionArgs, - RequestSequenceIndex = requestIndex - 1, - FunctionSequenceIndex = toolCallIndex, - FunctionCount = result.ToolCalls.Count, - CancellationToken = cancellationToken - }; - - s_inflightAutoInvokes.Value++; - try - { - invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => - { - // Check if filter requested termination. - if (context.Terminate) - { - return; - } - - // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any - // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, - // as the called function could in turn telling the model about itself as a possible candidate for invocation. - context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); - }).ConfigureAwait(false); - } -#pragma warning disable CA1031 // Do not catch general exception types - catch (Exception e) -#pragma warning restore CA1031 // Do not catch general exception types - { - AddResponseMessage(chatOptions, chat, null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger); - continue; - } - finally - { - s_inflightAutoInvokes.Value--; - } - - // Apply any changes from the auto function invocation filters context to final result. - functionResult = invocationContext.Result; - - object functionResultValue = functionResult.GetValue() ?? 
string.Empty; - var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); - - AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, functionToolCall, this.Logger); - - // If filter requested termination, returning latest function result. - if (invocationContext.Terminate) - { - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Filter requested termination of automatic function invocation."); - } - - return [chat.Last()]; - } - } - - // Update tool use information for the next go-around based on having completed another iteration. - Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); - - // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value. - chatOptions.ToolChoice = ChatCompletionsToolChoice.None; - chatOptions.Tools.Clear(); - - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) - { - // Don't add any tools as we've reached the maximum attempts limit. - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); - } - } - else - { - // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented - // what functions are available in the kernel. - chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions); - } - - // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'") - // if we don't send any tools in subsequent requests, even if we say not to use any. - if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None) - { - Debug.Assert(chatOptions.Tools.Count == 0); - chatOptions.Tools.Add(s_nonInvocableFunctionTool); - } - - // Disable auto invocation if we've exceeded the allowed limit. 
- if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) - { - autoInvoke = false; - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); - } - } - } - } - - internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - Verify.NotNull(chat); - - OpenAIPromptExecutionSettings chatExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - - ValidateMaxTokens(chatExecutionSettings.MaxTokens); - - bool autoInvoke = kernel is not null && chatExecutionSettings.ToolCallBehavior?.MaximumAutoInvokeAttempts > 0 && s_inflightAutoInvokes.Value < MaxInflightAutoInvokes; - ValidateAutoInvoke(autoInvoke, chatExecutionSettings.ResultsPerPrompt); - - var chatOptions = this.CreateChatCompletionsOptions(chatExecutionSettings, chat, kernel, this.DeploymentOrModelName); - - StringBuilder? contentBuilder = null; - Dictionary? toolCallIdsByIndex = null; - Dictionary? functionNamesByIndex = null; - Dictionary? functionArgumentBuildersByIndex = null; - - for (int requestIndex = 1; ; requestIndex++) - { - // Reset state - contentBuilder?.Clear(); - toolCallIdsByIndex?.Clear(); - functionNamesByIndex?.Clear(); - functionArgumentBuildersByIndex?.Clear(); - - // Stream the response. - IReadOnlyDictionary? metadata = null; - string? streamedName = null; - ChatRole? streamedRole = default; - CompletionsFinishReason finishReason = default; - ChatCompletionsFunctionToolCall[]? toolCalls = null; - FunctionCallContent[]? functionCallContents = null; - - using (var activity = ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.DeploymentOrModelName, ModelProvider, chat, chatExecutionSettings)) - { - // Make the request. 
- StreamingResponse response; - try - { - response = await RunRequestAsync(() => this.Client.GetChatCompletionsStreamingAsync(chatOptions, cancellationToken)).ConfigureAwait(false); - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); - List? streamedContents = activity is not null ? [] : null; - try - { - while (true) - { - try - { - if (!await responseEnumerator.MoveNextAsync()) - { - break; - } - } - catch (Exception ex) when (activity is not null) - { - activity.SetError(ex); - throw; - } - - StreamingChatCompletionsUpdate update = responseEnumerator.Current; - metadata = GetResponseMetadata(update); - streamedRole ??= update.Role; - streamedName ??= update.AuthorName; - finishReason = update.FinishReason ?? default; - - // If we're intending to invoke function calls, we need to consume that function call information. - if (autoInvoke) - { - if (update.ContentUpdate is { Length: > 0 } contentUpdate) - { - (contentBuilder ??= new()).Append(contentUpdate); - } - - OpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); - } - - AuthorRole? role = null; - if (streamedRole.HasValue) - { - role = new AuthorRole(streamedRole.Value.ToString()); - } - - OpenAIStreamingChatMessageContent openAIStreamingChatMessageContent = - new(update, update.ChoiceIndex ?? 
0, this.DeploymentOrModelName, metadata) - { - AuthorName = streamedName, - Role = role, - }; - - if (update.ToolCallUpdate is StreamingFunctionToolCallUpdate functionCallUpdate) - { - openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( - callId: functionCallUpdate.Id, - name: functionCallUpdate.Name, - arguments: functionCallUpdate.ArgumentsUpdate, - functionCallIndex: functionCallUpdate.ToolCallIndex)); - } - - streamedContents?.Add(openAIStreamingChatMessageContent); - yield return openAIStreamingChatMessageContent; - } - - // Translate all entries into ChatCompletionsFunctionToolCall instances. - toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( - ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); - - // Translate all entries into FunctionCallContent instances for diagnostics purposes. - functionCallContents = this.GetFunctionCallContents(toolCalls).ToArray(); - } - finally - { - activity?.EndStreaming(streamedContents, ModelDiagnostics.IsSensitiveEventsEnabled() ? functionCallContents : null); - await responseEnumerator.DisposeAsync(); - } - } - - // If we don't have a function to invoke, we're done. - // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service - // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool - // is specified. - if (!autoInvoke || - toolCallIdsByIndex is not { Count: > 0 }) - { - yield break; - } - - // Get any response content that was streamed. - string content = contentBuilder?.ToString() ?? 
string.Empty; - - // Log the requests - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("Function call requests: {Requests}", string.Join(", ", toolCalls.Select(fcr => $"{fcr.Name}({fcr.Arguments})"))); - } - else if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Function call requests: {Requests}", toolCalls.Length); - } - - // Add the original assistant message to the chatOptions; this is required for the service - // to understand the tool call responses. - chatOptions.Messages.Add(GetRequestMessage(streamedRole ?? default, content, streamedName, toolCalls)); - - var chatMessageContent = this.GetChatMessage(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName); - chat.Add(chatMessageContent); - - // Respond to each tooling request. - for (int toolCallIndex = 0; toolCallIndex < toolCalls.Length; toolCallIndex++) - { - ChatCompletionsFunctionToolCall toolCall = toolCalls[toolCallIndex]; - - // We currently only know about function tool calls. If it's anything else, we'll respond with an error. - if (string.IsNullOrEmpty(toolCall.Name)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger); - continue; - } - - // Parse the function call arguments. - OpenAIFunctionToolCall? openAIFunctionToolCall; - try - { - openAIFunctionToolCall = new(toolCall); - } - catch (JsonException) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger); - continue; - } - - // Make sure the requested function is one we requested. If we're permitting any kernel function to be invoked, - // then we don't need to check this, as it'll be handled when we look up the function in the kernel to be able - // to invoke it. If we're permitting only a specific list of functions, though, then we need to explicitly check. 
- if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true && - !IsRequestableTool(chatOptions, openAIFunctionToolCall)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger); - continue; - } - - // Find the function in the kernel and populate the arguments. - if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs)) - { - AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger); - continue; - } - - // Now, invoke the function, and add the resulting tool call message to the chat options. - FunctionResult functionResult = new(function) { Culture = kernel.Culture }; - AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chat, chatMessageContent) - { - ToolCallId = toolCall.Id, - Arguments = functionArgs, - RequestSequenceIndex = requestIndex - 1, - FunctionSequenceIndex = toolCallIndex, - FunctionCount = toolCalls.Length, - CancellationToken = cancellationToken - }; - - s_inflightAutoInvokes.Value++; - try - { - invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => - { - // Check if filter requested termination. - if (context.Terminate) - { - return; - } - - // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any - // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, - // as the called function could in turn telling the model about itself as a possible candidate for invocation. 
- context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); - }).ConfigureAwait(false); - } -#pragma warning disable CA1031 // Do not catch general exception types - catch (Exception e) -#pragma warning restore CA1031 // Do not catch general exception types - { - AddResponseMessage(chatOptions, chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger); - continue; - } - finally - { - s_inflightAutoInvokes.Value--; - } - - // Apply any changes from the auto function invocation filters context to final result. - functionResult = invocationContext.Result; - - object functionResultValue = functionResult.GetValue() ?? string.Empty; - var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior); - - AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, toolCall, this.Logger); - - // If filter requested termination, returning latest function result and breaking request iteration loop. - if (invocationContext.Terminate) - { - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Filter requested termination of automatic function invocation."); - } - - var lastChatMessage = chat.Last(); - - yield return new OpenAIStreamingChatMessageContent(lastChatMessage.Role, lastChatMessage.Content); - yield break; - } - } - - // Update tool use information for the next go-around based on having completed another iteration. - Debug.Assert(chatExecutionSettings.ToolCallBehavior is not null); - - // Set the tool choice to none. If we end up wanting to use tools, we'll reset it to the desired value. - chatOptions.ToolChoice = ChatCompletionsToolChoice.None; - chatOptions.Tools.Clear(); - - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts) - { - // Don't add any tools as we've reached the maximum attempts limit. 
- if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum use ({MaximumUse}) reached; removing the tool.", chatExecutionSettings.ToolCallBehavior!.MaximumUseAttempts); - } - } - else - { - // Regenerate the tool list as necessary. The invocation of the function(s) could have augmented - // what functions are available in the kernel. - chatExecutionSettings.ToolCallBehavior.ConfigureOptions(kernel, chatOptions); - } - - // Having already sent tools and with tool call information in history, the service can become unhappy ("[] is too short - 'tools'") - // if we don't send any tools in subsequent requests, even if we say not to use any. - if (chatOptions.ToolChoice == ChatCompletionsToolChoice.None) - { - Debug.Assert(chatOptions.Tools.Count == 0); - chatOptions.Tools.Add(s_nonInvocableFunctionTool); - } - - // Disable auto invocation if we've exceeded the allowed limit. - if (requestIndex >= chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts) - { - autoInvoke = false; - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", chatExecutionSettings.ToolCallBehavior!.MaximumAutoInvokeAttempts); - } - } - } - } - - /// Checks if a tool call is for a function that was defined. - private static bool IsRequestableTool(ChatCompletionsOptions options, OpenAIFunctionToolCall ftc) - { - IList tools = options.Tools; - for (int i = 0; i < tools.Count; i++) - { - if (tools[i] is ChatCompletionsFunctionToolDefinition def && - string.Equals(def.Name, ftc.FullyQualifiedName, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; - } - - internal async IAsyncEnumerable GetChatAsTextStreamingContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - ChatHistory chat = CreateNewChat(prompt, chatSettings); - - await foreach (var chatUpdate in this.GetStreamingChatMessageContentsAsync(chat, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - { - yield return new StreamingTextContent(chatUpdate.Content, chatUpdate.ChoiceIndex, chatUpdate.ModelId, chatUpdate, Encoding.UTF8, chatUpdate.Metadata); - } - } - - internal async Task> GetChatAsTextContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - Kernel? kernel, - CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); - - ChatHistory chat = CreateNewChat(text, chatSettings); - return (await this.GetChatMessageContentsAsync(chat, chatSettings, kernel, cancellationToken).ConfigureAwait(false)) - .Select(chat => new TextContent(chat.Content, chat.ModelId, chat.Content, Encoding.UTF8, chat.Metadata)) - .ToList(); - } - - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - /// Gets options to use for an OpenAIClient - /// Custom for HTTP requests. - /// Optional API version. - /// An instance of . - internal static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, OpenAIClientOptions.ServiceVersion? serviceVersion = null) - { - OpenAIClientOptions options = serviceVersion is not null ? 
- new(serviceVersion.Value) : - new(); - - options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent; - options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), HttpPipelinePosition.PerCall); - - if (httpClient is not null) - { - options.Transport = new HttpClientTransport(httpClient); - options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided. - options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout - } - - return options; - } - - /// - /// Create a new empty chat instance - /// - /// Optional chat instructions for the AI service - /// Execution settings - /// Chat object - private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? executionSettings = null) - { - var chat = new ChatHistory(); - - // If settings is not provided, create a new chat with the text as the system prompt - AuthorRole textRole = AuthorRole.System; - - if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt)) - { - chat.AddSystemMessage(executionSettings!.ChatSystemPrompt!); - textRole = AuthorRole.User; - } - - if (!string.IsNullOrWhiteSpace(text)) - { - chat.AddMessage(textRole, text!); - } - - return chat; - } - - private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIPromptExecutionSettings executionSettings, string deploymentOrModelName) - { - if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - var options = new CompletionsOptions - { - Prompts = { text.Replace("\r\n", "\n") }, // normalize line endings - MaxTokens = 
executionSettings.MaxTokens, - Temperature = (float?)executionSettings.Temperature, - NucleusSamplingFactor = (float?)executionSettings.TopP, - FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, - PresencePenalty = (float?)executionSettings.PresencePenalty, - Echo = false, - ChoicesPerPrompt = executionSettings.ResultsPerPrompt, - GenerationSampleCount = executionSettings.ResultsPerPrompt, - LogProbabilityCount = executionSettings.TopLogprobs, - User = executionSettings.User, - DeploymentName = deploymentOrModelName - }; - - if (executionSettings.TokenSelectionBiases is not null) - { - foreach (var keyValue in executionSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - } - - if (executionSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in executionSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - return options; - } - - private ChatCompletionsOptions CreateChatCompletionsOptions( - OpenAIPromptExecutionSettings executionSettings, - ChatHistory chatHistory, - Kernel? 
kernel, - string deploymentOrModelName) - { - if (executionSettings.ResultsPerPrompt is < 1 or > MaxResultsPerPrompt) - { - throw new ArgumentOutOfRangeException($"{nameof(executionSettings)}.{nameof(executionSettings.ResultsPerPrompt)}", executionSettings.ResultsPerPrompt, $"The value must be in range between 1 and {MaxResultsPerPrompt}, inclusive."); - } - - if (this.Logger.IsEnabled(LogLevel.Trace)) - { - this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", - JsonSerializer.Serialize(chatHistory), - JsonSerializer.Serialize(executionSettings)); - } - - var options = new ChatCompletionsOptions - { - MaxTokens = executionSettings.MaxTokens, - Temperature = (float?)executionSettings.Temperature, - NucleusSamplingFactor = (float?)executionSettings.TopP, - FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, - PresencePenalty = (float?)executionSettings.PresencePenalty, - ChoiceCount = executionSettings.ResultsPerPrompt, - DeploymentName = deploymentOrModelName, - Seed = executionSettings.Seed, - User = executionSettings.User, - LogProbabilitiesPerToken = executionSettings.TopLogprobs, - EnableLogProbabilities = executionSettings.Logprobs, - AzureExtensionsOptions = executionSettings.AzureChatExtensionsOptions - }; - - switch (executionSettings.ResponseFormat) - { - case ChatCompletionsResponseFormat formatObject: - // If the response format is an Azure SDK ChatCompletionsResponseFormat, just pass it along. - options.ResponseFormat = formatObject; - break; - - case string formatString: - // If the response format is a string, map the ones we know about, and ignore the rest. - switch (formatString) - { - case "json_object": - options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; - break; - - case "text": - options.ResponseFormat = ChatCompletionsResponseFormat.Text; - break; - } - break; - - case JsonElement formatElement: - // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. 
- // Handling only string formatElement. - if (formatElement.ValueKind == JsonValueKind.String) - { - string formatString = formatElement.GetString() ?? ""; - switch (formatString) - { - case "json_object": - options.ResponseFormat = ChatCompletionsResponseFormat.JsonObject; - break; - - case "text": - options.ResponseFormat = ChatCompletionsResponseFormat.Text; - break; - } - } - break; - } - - executionSettings.ToolCallBehavior?.ConfigureOptions(kernel, options); - if (executionSettings.TokenSelectionBiases is not null) - { - foreach (var keyValue in executionSettings.TokenSelectionBiases) - { - options.TokenSelectionBiases.Add(keyValue.Key, keyValue.Value); - } - } - - if (executionSettings.StopSequences is { Count: > 0 }) - { - foreach (var s in executionSettings.StopSequences) - { - options.StopSequences.Add(s); - } - } - - if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) - { - options.Messages.AddRange(GetRequestMessages(new ChatMessageContent(AuthorRole.System, executionSettings!.ChatSystemPrompt), executionSettings.ToolCallBehavior)); - } - - foreach (var message in chatHistory) - { - options.Messages.AddRange(GetRequestMessages(message, executionSettings.ToolCallBehavior)); - } - - return options; - } - - private static ChatRequestMessage GetRequestMessage(ChatRole chatRole, string contents, string? name, ChatCompletionsFunctionToolCall[]? 
tools) - { - if (chatRole == ChatRole.User) - { - return new ChatRequestUserMessage(contents) { Name = name }; - } - - if (chatRole == ChatRole.System) - { - return new ChatRequestSystemMessage(contents) { Name = name }; - } - - if (chatRole == ChatRole.Assistant) - { - var msg = new ChatRequestAssistantMessage(contents) { Name = name }; - if (tools is not null) - { - foreach (ChatCompletionsFunctionToolCall tool in tools) - { - msg.ToolCalls.Add(tool); - } - } - return msg; - } - - throw new NotImplementedException($"Role {chatRole} is not implemented"); - } - - private static List GetRequestMessages(ChatMessageContent message, ToolCallBehavior? toolCallBehavior) - { - if (message.Role == AuthorRole.System) - { - return [new ChatRequestSystemMessage(message.Content) { Name = message.AuthorName }]; - } - - if (message.Role == AuthorRole.Tool) - { - // Handling function results represented by the TextContent type. - // Example: new ChatMessageContent(AuthorRole.Tool, content, metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }) - if (message.Metadata?.TryGetValue(OpenAIChatMessageContent.ToolIdProperty, out object? toolId) is true && - toolId?.ToString() is string toolIdString) - { - return [new ChatRequestToolMessage(message.Content, toolIdString)]; - } - - // Handling function results represented by the FunctionResultContent type. - // Example: new ChatMessageContent(AuthorRole.Tool, items: new ChatMessageContentItemCollection { new FunctionResultContent(functionCall, result) }) - List? toolMessages = null; - foreach (var item in message.Items) - { - if (item is not FunctionResultContent resultContent) - { - continue; - } - - toolMessages ??= []; - - if (resultContent.Result is Exception ex) - { - toolMessages.Add(new ChatRequestToolMessage($"Error: Exception while invoking function. {ex.Message}", resultContent.CallId)); - continue; - } - - var stringResult = ProcessFunctionResult(resultContent.Result ?? 
string.Empty, toolCallBehavior); - - toolMessages.Add(new ChatRequestToolMessage(stringResult ?? string.Empty, resultContent.CallId)); - } - - if (toolMessages is not null) - { - return toolMessages; - } - - throw new NotSupportedException("No function result provided in the tool message."); - } - - if (message.Role == AuthorRole.User) - { - if (message.Items is { Count: 1 } && message.Items.FirstOrDefault() is TextContent textContent) - { - return [new ChatRequestUserMessage(textContent.Text) { Name = message.AuthorName }]; - } - - return [new ChatRequestUserMessage(message.Items.Select(static (KernelContent item) => (ChatMessageContentItem)(item switch - { - TextContent textContent => new ChatMessageTextContentItem(textContent.Text), - ImageContent imageContent => GetImageContentItem(imageContent), - _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") - }))) - { Name = message.AuthorName }]; - } - - if (message.Role == AuthorRole.Assistant) - { - var asstMessage = new ChatRequestAssistantMessage(message.Content) { Name = message.AuthorName }; - - // Handling function calls supplied via either: - // ChatCompletionsToolCall.ToolCalls collection items or - // ChatMessageContent.Metadata collection item with 'ChatResponseMessage.FunctionToolCalls' key. - IEnumerable? tools = (message as OpenAIChatMessageContent)?.ToolCalls; - if (tools is null && message.Metadata?.TryGetValue(OpenAIChatMessageContent.FunctionToolCallsProperty, out object? 
toolCallsObject) is true) - { - tools = toolCallsObject as IEnumerable; - if (tools is null && toolCallsObject is JsonElement { ValueKind: JsonValueKind.Array } array) - { - int length = array.GetArrayLength(); - var ftcs = new List(length); - for (int i = 0; i < length; i++) - { - JsonElement e = array[i]; - if (e.TryGetProperty("Id", out JsonElement id) && - e.TryGetProperty("Name", out JsonElement name) && - e.TryGetProperty("Arguments", out JsonElement arguments) && - id.ValueKind == JsonValueKind.String && - name.ValueKind == JsonValueKind.String && - arguments.ValueKind == JsonValueKind.String) - { - ftcs.Add(new ChatCompletionsFunctionToolCall(id.GetString()!, name.GetString()!, arguments.GetString()!)); - } - } - tools = ftcs; - } - } - - if (tools is not null) - { - asstMessage.ToolCalls.AddRange(tools); - } - - // Handling function calls supplied via ChatMessageContent.Items collection elements of the FunctionCallContent type. - HashSet? functionCallIds = null; - foreach (var item in message.Items) - { - if (item is not FunctionCallContent callRequest) - { - continue; - } - - functionCallIds ??= new HashSet(asstMessage.ToolCalls.Select(t => t.Id)); - - if (callRequest.Id is null || functionCallIds.Contains(callRequest.Id)) - { - continue; - } - - var argument = JsonSerializer.Serialize(callRequest.Arguments); - - asstMessage.ToolCalls.Add(new ChatCompletionsFunctionToolCall(callRequest.Id, FunctionName.ToFullyQualifiedName(callRequest.FunctionName, callRequest.PluginName, OpenAIFunction.NameSeparator), argument ?? 
string.Empty)); - } - - return [asstMessage]; - } - - throw new NotSupportedException($"Role {message.Role} is not supported."); - } - - private static ChatMessageImageContentItem GetImageContentItem(ImageContent imageContent) - { - if (imageContent.Data is { IsEmpty: false } data) - { - return new ChatMessageImageContentItem(BinaryData.FromBytes(data), imageContent.MimeType); - } - - if (imageContent.Uri is not null) - { - return new ChatMessageImageContentItem(imageContent.Uri); - } - - throw new ArgumentException($"{nameof(ImageContent)} must have either Data or a Uri."); - } - - private static ChatRequestMessage GetRequestMessage(ChatResponseMessage message) - { - if (message.Role == ChatRole.System) - { - return new ChatRequestSystemMessage(message.Content); - } - - if (message.Role == ChatRole.Assistant) - { - var msg = new ChatRequestAssistantMessage(message.Content); - if (message.ToolCalls is { Count: > 0 } tools) - { - foreach (ChatCompletionsToolCall tool in tools) - { - msg.ToolCalls.Add(tool); - } - } - - return msg; - } - - if (message.Role == ChatRole.User) - { - return new ChatRequestUserMessage(message.Content); - } - - throw new NotSupportedException($"Role {message.Role} is not supported."); - } - - private OpenAIChatMessageContent GetChatMessage(ChatChoice chatChoice, ChatCompletions responseData) - { - var message = new OpenAIChatMessageContent(chatChoice.Message, this.DeploymentOrModelName, GetChatChoiceMetadata(responseData, chatChoice)); - - message.Items.AddRange(this.GetFunctionCallContents(chatChoice.Message.ToolCalls)); - - return message; - } - - private OpenAIChatMessageContent GetChatMessage(ChatRole chatRole, string content, ChatCompletionsFunctionToolCall[] toolCalls, FunctionCallContent[]? functionCalls, IReadOnlyDictionary? metadata, string? 
authorName) - { - var message = new OpenAIChatMessageContent(chatRole, content, this.DeploymentOrModelName, toolCalls, metadata) - { - AuthorName = authorName, - }; - - if (functionCalls is not null) - { - message.Items.AddRange(functionCalls); - } - - return message; - } - - private IEnumerable GetFunctionCallContents(IEnumerable toolCalls) - { - List? result = null; - - foreach (var toolCall in toolCalls) - { - // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property. - // This allows consumers to work with functions in an LLM-agnostic way. - if (toolCall is ChatCompletionsFunctionToolCall functionToolCall) - { - Exception? exception = null; - KernelArguments? arguments = null; - try - { - arguments = JsonSerializer.Deserialize(functionToolCall.Arguments); - if (arguments is not null) - { - // Iterate over copy of the names to avoid mutating the dictionary while enumerating it - var names = arguments.Names.ToArray(); - foreach (var name in names) - { - arguments[name] = arguments[name]?.ToString(); - } - } - } - catch (JsonException ex) - { - exception = new KernelException("Error: Function call arguments were invalid JSON.", ex); - - if (this.Logger.IsEnabled(LogLevel.Debug)) - { - this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", functionToolCall.Name, functionToolCall.Id); - } - } - - var functionName = FunctionName.Parse(functionToolCall.Name, OpenAIFunction.NameSeparator); - - var functionCallContent = new FunctionCallContent( - functionName: functionName.Name, - pluginName: functionName.PluginName, - id: functionToolCall.Id, - arguments: arguments) - { - InnerContent = functionToolCall, - Exception = exception - }; - - result ??= []; - result.Add(functionCallContent); - } - } - - return result ?? 
Enumerable.Empty(); - } - - private static void AddResponseMessage(ChatCompletionsOptions chatOptions, ChatHistory chat, string? result, string? errorMessage, ChatCompletionsToolCall toolCall, ILogger logger) - { - // Log any error - if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug)) - { - Debug.Assert(result is null); - logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, errorMessage); - } - - // Add the tool response message to the chat options - result ??= errorMessage ?? string.Empty; - chatOptions.Messages.Add(new ChatRequestToolMessage(result, toolCall.Id)); - - // Add the tool response message to the chat history. - var message = new ChatMessageContent(role: AuthorRole.Tool, content: result, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }); - - if (toolCall is ChatCompletionsFunctionToolCall functionCall) - { - // Add an item of type FunctionResultContent to the ChatMessageContent.Items collection in addition to the function result stored as a string in the ChatMessageContent.Content property. - // This will enable migration to the new function calling model and facilitate the deprecation of the current one in the future. - var functionName = FunctionName.Parse(functionCall.Name, OpenAIFunction.NameSeparator); - message.Items.Add(new FunctionResultContent(functionName.Name, functionName.PluginName, functionCall.Id, result)); - } - - chat.Add(message); - } - - private static void ValidateMaxTokens(int? maxTokens) - { - if (maxTokens.HasValue && maxTokens < 1) - { - throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); - } - } - - private static void ValidateAutoInvoke(bool autoInvoke, int resultsPerPrompt) - { - if (autoInvoke && resultsPerPrompt != 1) - { - // We can remove this restriction in the future if valuable. 
However, multiple results per prompt is rare, - // and limiting this significantly curtails the complexity of the implementation. - throw new ArgumentException($"Auto-invocation of tool calls may only be used with a {nameof(OpenAIPromptExecutionSettings.ResultsPerPrompt)} of 1."); - } - } - - private static async Task RunRequestAsync(Func> request) - { - try - { - return await request.Invoke().ConfigureAwait(false); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - } - - /// - /// Captures usage details, including token information. - /// - /// Instance of with usage details. - private void LogUsage(CompletionsUsage usage) - { - if (usage is null) - { - this.Logger.LogDebug("Token usage information unavailable."); - return; - } - - if (this.Logger.IsEnabled(LogLevel.Information)) - { - this.Logger.LogInformation( - "Prompt tokens: {PromptTokens}. Completion tokens: {CompletionTokens}. Total tokens: {TotalTokens}.", - usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens); - } - - s_promptTokensCounter.Add(usage.PromptTokens); - s_completionTokensCounter.Add(usage.CompletionTokens); - s_totalTokensCounter.Add(usage.TotalTokens); - } - - /// - /// Processes the function result. - /// - /// The result of the function call. - /// The ToolCallBehavior object containing optional settings like JsonSerializerOptions.TypeInfoResolver. - /// A string representation of the function result. - private static string? ProcessFunctionResult(object functionResult, ToolCallBehavior? toolCallBehavior) - { - if (functionResult is string stringResult) - { - return stringResult; - } - - // This is an optimization to use ChatMessageContent content directly - // without unnecessary serialization of the whole message content class. 
- if (functionResult is ChatMessageContent chatMessageContent) - { - return chatMessageContent.ToString(); - } - - // For polymorphic serialization of unknown in advance child classes of the KernelContent class, - // a corresponding JsonTypeInfoResolver should be provided via the JsonSerializerOptions.TypeInfoResolver property. - // For more details about the polymorphic serialization, see the article at: - // https://learn.microsoft.com/en-us/dotnet/standard/serialization/system-text-json/polymorphism?pivots=dotnet-8-0 -#pragma warning disable CS0618 // Type or member is obsolete - return JsonSerializer.Serialize(functionResult, toolCallBehavior?.ToolCallResultSerializerOptions); -#pragma warning restore CS0618 // Type or member is obsolete - } - - /// - /// Executes auto function invocation filters and/or function itself. - /// This method can be moved to when auto function invocation logic will be extracted to common place. - /// - private static async Task OnAutoFunctionInvocationAsync( - Kernel kernel, - AutoFunctionInvocationContext context, - Func functionCallCallback) - { - await InvokeFilterOrFunctionAsync(kernel.AutoFunctionInvocationFilters, functionCallCallback, context).ConfigureAwait(false); - - return context; - } - - /// - /// This method will execute auto function invocation filters and function recursively. - /// If there are no registered filters, just function will be executed. - /// If there are registered filters, filter on position will be executed. - /// Second parameter of filter is callback. It can be either filter on + 1 position or function if there are no remaining filters to execute. - /// Function will be always executed as last step after all filters. - /// - private static async Task InvokeFilterOrFunctionAsync( - IList? 
autoFunctionInvocationFilters, - Func functionCallCallback, - AutoFunctionInvocationContext context, - int index = 0) - { - if (autoFunctionInvocationFilters is { Count: > 0 } && index < autoFunctionInvocationFilters.Count) - { - await autoFunctionInvocationFilters[index].OnAutoFunctionInvocationAsync(context, - (context) => InvokeFilterOrFunctionAsync(autoFunctionInvocationFilters, functionCallCallback, context, index + 1)).ConfigureAwait(false); - } - else - { - await functionCallCallback(context).ConfigureAwait(false); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs deleted file mode 100644 index e0f5733dd5c0..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/CustomHostPipelinePolicy.cs +++ /dev/null @@ -1,23 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using Azure.Core; -using Azure.Core.Pipeline; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI.Core.AzureSdk; - -internal sealed class CustomHostPipelinePolicy : HttpPipelineSynchronousPolicy -{ - private readonly Uri _endpoint; - - internal CustomHostPipelinePolicy(Uri endpoint) - { - this._endpoint = endpoint; - } - - public override void OnSendingRequest(HttpMessage message) - { - // Update current host to provided endpoint - message.Request?.Uri.Reset(this._endpoint); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs deleted file mode 100644 index 32cc0ab22f19..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIClientCore.cs +++ /dev/null @@ -1,106 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Runtime.CompilerServices; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI.Core.AzureSdk; -using Microsoft.SemanticKernel.Services; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Core implementation for OpenAI clients, providing common functionality and properties. -/// -internal sealed class OpenAIClientCore : ClientCore -{ - private const string DefaultPublicEndpoint = "https://api.openai.com/v1"; - - /// - /// Gets the attribute name used to store the organization in the dictionary. - /// - public static string OrganizationKey => "Organization"; - - /// - /// OpenAI / Azure OpenAI Client - /// - internal override OpenAIClient Client { get; } - - /// - /// Initializes a new instance of the class. - /// - /// Model name. - /// OpenAI API Key. - /// OpenAI compatible API endpoint. - /// OpenAI Organization Id (usually optional). - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAIClientCore( - string modelId, - string? apiKey = null, - Uri? endpoint = null, - string? organization = null, - HttpClient? httpClient = null, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(modelId); - - this.DeploymentOrModelName = modelId; - - var options = GetOpenAIClientOptions(httpClient); - - if (!string.IsNullOrWhiteSpace(organization)) - { - options.AddPolicy(new AddHeaderRequestPolicy("OpenAI-Organization", organization!), HttpPipelinePosition.PerCall); - } - - // Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint. - var providedEndpoint = endpoint ?? httpClient?.BaseAddress; - if (providedEndpoint is null) - { - Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided. 
- this.Endpoint = new Uri(DefaultPublicEndpoint); - } - else - { - options.AddPolicy(new CustomHostPipelinePolicy(providedEndpoint), Azure.Core.HttpPipelinePosition.PerRetry); - this.Endpoint = providedEndpoint; - } - - this.Client = new OpenAIClient(apiKey ?? string.Empty, options); - } - - /// - /// Initializes a new instance of the class using the specified OpenAIClient. - /// Note: instances created this way might not have the default diagnostics settings, - /// it's up to the caller to configure the client. - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// The to use for logging. If null, no logging will be performed. - internal OpenAIClientCore( - string modelId, - OpenAIClient openAIClient, - ILogger? logger = null) : base(logger) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNull(openAIClient); - - this.DeploymentOrModelName = modelId; - this.Client = openAIClient; - } - - /// - /// Logs OpenAI action details. - /// - /// Caller member name. Populated automatically by runtime. - internal void LogActionDetails([CallerMemberName] string? callerMemberName = default) - { - if (this.Logger.IsEnabled(LogLevel.Information)) - { - this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.DeploymentOrModelName); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs deleted file mode 100644 index 126e1615a747..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingTextContent.cs +++ /dev/null @@ -1,51 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Text; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI and OpenAI Specialized streaming text content. 
-/// -/// -/// Represents a text content chunk that was streamed from the remote model. -/// -public sealed class OpenAIStreamingTextContent : StreamingTextContent -{ - /// - /// Create a new instance of the class. - /// - /// Text update - /// Index of the choice - /// The model ID used to generate the content - /// Inner chunk object - /// Metadata information - internal OpenAIStreamingTextContent( - string text, - int choiceIndex, - string modelId, - object? innerContentObject = null, - IReadOnlyDictionary? metadata = null) - : base( - text, - choiceIndex, - modelId, - innerContentObject, - Encoding.UTF8, - metadata) - { - } - - /// - public override byte[] ToByteArray() - { - return this.Encoding.GetBytes(this.ToString()); - } - - /// - public override string ToString() - { - return this.Text ?? string.Empty; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs deleted file mode 100644 index 7f3daaa2d941..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAITextToAudioClient.cs +++ /dev/null @@ -1,128 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text-to-audio client for HTTP operations. -/// -[Experimental("SKEXP0001")] -internal sealed class OpenAITextToAudioClient -{ - private readonly ILogger _logger; - private readonly HttpClient _httpClient; - - private readonly string _modelId; - private readonly string _apiKey; - private readonly string? _organization; - - /// - /// Storage for AI service attributes. 
- /// - internal Dictionary Attributes { get; } = []; - - /// - /// Creates an instance of the with API key auth. - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAITextToAudioClient( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILogger? logger = null) - { - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - this._modelId = modelId; - this._apiKey = apiKey; - this._organization = organization; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - internal async Task> GetAudioContentsAsync( - string text, - PromptExecutionSettings? executionSettings, - CancellationToken cancellationToken) - { - OpenAITextToAudioExecutionSettings? audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); - - Verify.NotNullOrWhiteSpace(audioExecutionSettings?.Voice); - - using var request = this.GetRequest(text, audioExecutionSettings); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - var data = await response.Content.ReadAsByteArrayAndTranslateExceptionAsync().ConfigureAwait(false); - - return [new(data, this._modelId)]; - } - - internal void AddAttribute(string key, string? 
value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - #region private - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Authorization", $"Bearer {this._apiKey}"); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAITextToAudioClient))); - - if (!string.IsNullOrWhiteSpace(this._organization)) - { - request.Headers.Add("OpenAI-Organization", this._organization); - } - - try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on text-to-audio request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private HttpRequestMessage GetRequest(string text, OpenAITextToAudioExecutionSettings executionSettings) - { - const string DefaultBaseUrl = "https://api.openai.com"; - - var baseUrl = !string.IsNullOrWhiteSpace(this._httpClient.BaseAddress?.AbsoluteUri) ? 
- this._httpClient.BaseAddress!.AbsoluteUri : - DefaultBaseUrl; - - var payload = new TextToAudioRequest(this._modelId, text, executionSettings.Voice) - { - ResponseFormat = executionSettings.ResponseFormat, - Speed = executionSettings.Speed - }; - - return HttpRequest.CreatePostRequest($"{baseUrl.TrimEnd('/')}/v1/audio/speech", payload); - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs deleted file mode 100644 index 7f49e74c5fa4..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataConfig.cs +++ /dev/null @@ -1,53 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Required configuration for Azure OpenAI chat completion with data. -/// More information: -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public class AzureOpenAIChatCompletionWithDataConfig -{ - /// - /// Azure OpenAI model ID or deployment name, see - /// - public string CompletionModelId { get; set; } = string.Empty; - - /// - /// Azure OpenAI deployment URL, see - /// - public string CompletionEndpoint { get; set; } = string.Empty; - - /// - /// Azure OpenAI API key, see - /// - public string CompletionApiKey { get; set; } = string.Empty; - - /// - /// Azure OpenAI Completion API version (e.g. 2024-02-01) - /// - public string CompletionApiVersion { get; set; } = string.Empty; - - /// - /// Data source endpoint URL. - /// For Azure AI Search, see - /// - public string DataSourceEndpoint { get; set; } = string.Empty; - - /// - /// Data source API key. 
- /// For Azure AI Search keys, see - /// - public string DataSourceApiKey { get; set; } = string.Empty; - - /// - /// Data source index name. - /// For Azure AI Search indexes, see - /// - public string DataSourceIndex { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs deleted file mode 100644 index 793209704bbf..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataService.cs +++ /dev/null @@ -1,305 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Runtime.CompilerServices; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.Text; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI Chat Completion with data service. -/// More information: -/// -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -public sealed class AzureOpenAIChatCompletionWithDataService : IChatCompletionService, ITextGenerationService -{ - /// - /// Initializes a new instance of the class. - /// - /// Instance of class with completion configuration. - /// Custom for HTTP requests. - /// Instance of to use for logging. 
- public AzureOpenAIChatCompletionWithDataService( - AzureOpenAIChatCompletionWithDataConfig config, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this.ValidateConfig(config); - - this._config = config; - - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = loggerFactory?.CreateLogger(this.GetType()) ?? NullLogger.Instance; - this._attributes.Add(AIServiceExtensions.ModelIdKey, config.CompletionModelId); - } - - /// - public IReadOnlyDictionary Attributes => this._attributes; - - /// - public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this.InternalGetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); - - /// - public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this.InternalGetChatStreamingContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); - - /// - public async Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return (await this.GetChatMessageContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - .Select(chat => new TextContent(chat.Content, chat.ModelId, chat, Encoding.UTF8, chat.Metadata)) - .ToList(); - } - - /// - public async IAsyncEnumerable GetStreamingTextContentsAsync( - string prompt, - PromptExecutionSettings? executionSettings = null, - Kernel? 
kernel = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - await foreach (var streamingChatContent in this.InternalGetChatStreamingContentsAsync(new ChatHistory(prompt), executionSettings, kernel, cancellationToken).ConfigureAwait(false)) - { - yield return new StreamingTextContent(streamingChatContent.Content, streamingChatContent.ChoiceIndex, streamingChatContent.ModelId, streamingChatContent, Encoding.UTF8, streamingChatContent.Metadata); - } - } - - #region private ================================================================================ - - private const string DefaultApiVersion = "2024-02-01"; - - private readonly AzureOpenAIChatCompletionWithDataConfig _config; - - private readonly HttpClient _httpClient; - private readonly ILogger _logger; - private readonly Dictionary _attributes = []; - private void ValidateConfig(AzureOpenAIChatCompletionWithDataConfig config) - { - Verify.NotNull(config); - - Verify.NotNullOrWhiteSpace(config.CompletionModelId); - Verify.NotNullOrWhiteSpace(config.CompletionEndpoint); - Verify.NotNullOrWhiteSpace(config.CompletionApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceEndpoint); - Verify.NotNullOrWhiteSpace(config.DataSourceApiKey); - Verify.NotNullOrWhiteSpace(config.DataSourceIndex); - } - - private async Task> InternalGetChatMessageContentsAsync( - ChatHistory chat, - PromptExecutionSettings? executionSettings, - Kernel? 
kernel, - CancellationToken cancellationToken = default) - { - var openAIExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings, OpenAIPromptExecutionSettings.DefaultTextMaxTokens); - - using var request = this.GetRequest(chat, openAIExecutionSettings, isStreamEnabled: false); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - var body = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - var chatWithDataResponse = this.DeserializeResponse(body); - IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); - - return chatWithDataResponse.Choices.Select(choice => new AzureOpenAIWithDataChatMessageContent(choice, this.GetModelId(), metadata)).ToList(); - } - - private static Dictionary GetResponseMetadata(ChatWithDataResponse chatResponse) - { - return new Dictionary(5) - { - { nameof(chatResponse.Id), chatResponse.Id }, - { nameof(chatResponse.Model), chatResponse.Model }, - { nameof(chatResponse.Created), chatResponse.Created }, - { nameof(chatResponse.Object), chatResponse.Object }, - { nameof(chatResponse.Usage), chatResponse.Usage }, - }; - } - - private static Dictionary GetResponseMetadata(ChatWithDataStreamingResponse chatResponse) - { - return new Dictionary(4) - { - { nameof(chatResponse.Id), chatResponse.Id }, - { nameof(chatResponse.Model), chatResponse.Model }, - { nameof(chatResponse.Created), chatResponse.Created }, - { nameof(chatResponse.Object), chatResponse.Object }, - }; - } - - private async Task SendRequestAsync( - HttpRequestMessage request, - CancellationToken cancellationToken = default) - { - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add("Api-Key", this._config.CompletionApiKey); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(AzureOpenAIChatCompletionWithDataService))); - 
- try - { - return await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (HttpOperationException ex) - { - this._logger.LogError( - "Error occurred on chat completion with data request execution: {ExceptionMessage}", ex.Message); - - throw; - } - } - - private async IAsyncEnumerable InternalGetChatStreamingContentsAsync( - ChatHistory chatHistory, - PromptExecutionSettings? executionSettings = null, - Kernel? kernel = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - OpenAIPromptExecutionSettings chatRequestSettings = OpenAIPromptExecutionSettings.FromExecutionSettingsWithData(executionSettings); - - using var request = this.GetRequest(chatHistory, chatRequestSettings, isStreamEnabled: true); - using var response = await this.SendRequestAsync(request, cancellationToken).ConfigureAwait(false); - - const string ServerEventPayloadPrefix = "data:"; - - using var stream = await response.Content.ReadAsStreamAndTranslateExceptionAsync().ConfigureAwait(false); - using var reader = new StreamReader(stream); - - while (!reader.EndOfStream) - { - var body = await reader.ReadLineAsync( -#if NET - cancellationToken -#endif - ).ConfigureAwait(false); - - if (string.IsNullOrWhiteSpace(body)) - { - continue; - } - - if (body.StartsWith(ServerEventPayloadPrefix, StringComparison.Ordinal)) - { - body = body.Substring(ServerEventPayloadPrefix.Length); - } - - var chatWithDataResponse = this.DeserializeResponse(body); - IReadOnlyDictionary metadata = GetResponseMetadata(chatWithDataResponse); - - foreach (var choice in chatWithDataResponse.Choices) - { - yield return new AzureOpenAIWithDataStreamingChatMessageContent(choice, choice.Index, this.GetModelId()!, metadata); - } - } - } - - private T DeserializeResponse(string body) - { - var response = JsonSerializer.Deserialize(body, JsonOptionsCache.ReadPermissive); - - if (response is null) - { - const string ErrorMessage = "Error occurred on 
chat completion with data response deserialization"; - - this._logger.LogError(ErrorMessage); - - throw new KernelException(ErrorMessage); - } - - return response; - } - - private HttpRequestMessage GetRequest( - ChatHistory chat, - OpenAIPromptExecutionSettings executionSettings, - bool isStreamEnabled) - { - var payload = new ChatWithDataRequest - { - Temperature = executionSettings.Temperature, - TopP = executionSettings.TopP, - IsStreamEnabled = isStreamEnabled, - StopSequences = executionSettings.StopSequences, - MaxTokens = executionSettings.MaxTokens, - PresencePenalty = executionSettings.PresencePenalty, - FrequencyPenalty = executionSettings.FrequencyPenalty, - TokenSelectionBiases = executionSettings.TokenSelectionBiases ?? new Dictionary(), - DataSources = this.GetDataSources(), - Messages = this.GetMessages(chat) - }; - - return HttpRequest.CreatePostRequest(this.GetRequestUri(), payload); - } - - private List GetDataSources() - { - return - [ - new() - { - Parameters = new ChatWithDataSourceParameters - { - Endpoint = this._config.DataSourceEndpoint, - ApiKey = this._config.DataSourceApiKey, - IndexName = this._config.DataSourceIndex - } - } - ]; - } - - private List GetMessages(ChatHistory chat) - { - // The system role as the unique message is not allowed in the With Data APIs. - // This avoids the error: Invalid message request body. Learn how to use Completions extension API, please refer to https://learn.microsoft.com/azure/ai-services/openai/reference#completions-extensions - if (chat.Count == 1 && chat[0].Role == AuthorRole.System) - { - // Converts a system message to a user message if is the unique message in the chat. - chat[0].Role = AuthorRole.User; - } - - return chat - .Select(message => new ChatWithDataMessage - { - Role = message.Role.Label, - Content = message.Content ?? 
string.Empty - }) - .ToList(); - } - - private string GetRequestUri() - { - const string EndpointUriFormat = "{0}/openai/deployments/{1}/extensions/chat/completions?api-version={2}"; - - var apiVersion = this._config.CompletionApiVersion; - - if (string.IsNullOrWhiteSpace(apiVersion)) - { - apiVersion = DefaultApiVersion; - } - - return string.Format( - CultureInfo.InvariantCulture, - EndpointUriFormat, - this._config.CompletionEndpoint.TrimEnd('/'), - this._config.CompletionModelId, - apiVersion); - } - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs deleted file mode 100644 index ce3a5e5465e3..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataMessage.cs +++ /dev/null @@ -1,18 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataMessage -{ - [JsonPropertyName("role")] - public string Role { get; set; } = string.Empty; - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs deleted file mode 100644 index 214b917a8a13..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataRequest.cs +++ /dev/null @@ -1,71 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataRequest -{ - [JsonPropertyName("temperature")] - public double Temperature { get; set; } = 0; - - [JsonPropertyName("top_p")] - public double TopP { get; set; } = 0; - - [JsonPropertyName("stream")] - public bool IsStreamEnabled { get; set; } - - [JsonPropertyName("stop")] - public IList? StopSequences { get; set; } = Array.Empty(); - - [JsonPropertyName("max_tokens")] - public int? MaxTokens { get; set; } - - [JsonPropertyName("presence_penalty")] - public double PresencePenalty { get; set; } = 0; - - [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty { get; set; } = 0; - - [JsonPropertyName("logit_bias")] - public IDictionary TokenSelectionBiases { get; set; } = new Dictionary(); - - [JsonPropertyName("dataSources")] - public IList DataSources { get; set; } = Array.Empty(); - - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataSource -{ - [JsonPropertyName("type")] - // The current API only supports "AzureCognitiveSearch" as name otherwise an error is returned. 
- // Validation error at #/dataSources/0: Input tag 'AzureAISearch' found using 'type' does not match any of - // the expected tags: 'AzureCognitiveSearch', 'Elasticsearch', 'AzureCosmosDB', 'Pinecone', 'AzureMLIndex', 'Microsoft365' - public string Type { get; set; } = "AzureCognitiveSearch"; - - [JsonPropertyName("parameters")] - public ChatWithDataSourceParameters Parameters { get; set; } = new ChatWithDataSourceParameters(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataSourceParameters -{ - [JsonPropertyName("endpoint")] - public string Endpoint { get; set; } = string.Empty; - - [JsonPropertyName("key")] - public string ApiKey { get; set; } = string.Empty; - - [JsonPropertyName("indexName")] - public string IndexName { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs deleted file mode 100644 index 4ba5e7761319..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataResponse.cs +++ /dev/null @@ -1,57 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -#pragma warning disable CA1812 // Avoid uninstantiated internal classes - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[method: JsonConstructor] -internal sealed class ChatWithDataResponse(ChatWithDataUsage usage) -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); - - [JsonPropertyName("usage")] - public ChatWithDataUsage Usage { get; set; } = usage; - - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - [JsonPropertyName("object")] - public string Object { get; set; } = string.Empty; -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataUsage -{ - [JsonPropertyName("prompt_tokens")] - public int PromptTokens { get; set; } - - [JsonPropertyName("completion_tokens")] - public int CompletionTokens { get; set; } - - [JsonPropertyName("total_tokens")] - public int TotalTokens { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs 
b/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs deleted file mode 100644 index 9455553d9642..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletionWithData/ChatWithDataStreamingResponse.cs +++ /dev/null @@ -1,64 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingResponse -{ - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - [JsonPropertyName("created")] - public int Created { get; set; } = default; - - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - [JsonPropertyName("object")] - public string Object { get; set; } = string.Empty; - - [JsonPropertyName("choices")] - public IList Choices { get; set; } = Array.Empty(); -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = "Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingChoice -{ - [JsonPropertyName("messages")] - public IList Messages { get; set; } = Array.Empty(); - - [JsonPropertyName("index")] - public int Index { get; set; } = 0; -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -[SuppressMessage("Performance", "CA1812:Avoid uninstantiated internal classes", Justification = 
"Used for JSON deserialization")] -internal sealed class ChatWithDataStreamingMessage -{ - [JsonPropertyName("delta")] - public ChatWithDataStreamingDelta Delta { get; set; } = new(); - - [JsonPropertyName("end_turn")] - public bool EndTurn { get; set; } -} - -[Experimental("SKEXP0010")] -[Obsolete("This class is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] -internal sealed class ChatWithDataStreamingDelta -{ - [JsonPropertyName("role")] - public string? Role { get; set; } - - [JsonPropertyName("content")] - public string Content { get; set; } = string.Empty; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..c3b3af979029 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml @@ -0,0 +1,18 @@ +๏ปฟ + + + + CP0002 + M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll + lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll + true + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj index f873d8d9cd29..ad8dc3ecaec7 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -10,18 +10,25 
@@ true + + rc + + + + - Semantic Kernel - OpenAI and Azure OpenAI connectors - Semantic Kernel connectors for OpenAI and Azure OpenAI. Contains clients for text generation, chat completion, embedding and DALL-E text to image. + Semantic Kernel - OpenAI connector + Semantic Kernel connectors for OpenAI. Contains clients for chat completion, embedding and DALL-E text to image. - + + @@ -29,6 +36,6 @@ - + diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs new file mode 100644 index 000000000000..d2245e74f73d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.AudioToText.cs @@ -0,0 +1,87 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Audio; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// Input audio to generate the text + /// Audio-to-text execution settings for the prompt + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task> GetTextFromAudioContentsAsync( + string targetModel, + AudioContent input, + PromptExecutionSettings? executionSettings, + CancellationToken cancellationToken) + { + if (!input.CanRead) + { + throw new ArgumentException("The input audio content is not readable.", nameof(input)); + } + + OpenAIAudioToTextExecutionSettings audioExecutionSettings = OpenAIAudioToTextExecutionSettings.FromExecutionSettings(executionSettings)!; + AudioTranscriptionOptions? 
audioOptions = AudioOptionsFromExecutionSettings(audioExecutionSettings); + + Verify.ValidFilename(audioExecutionSettings?.Filename); + + using var memoryStream = new MemoryStream(input.Data!.Value.ToArray()); + + AudioTranscription responseData = (await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).TranscribeAudioAsync(memoryStream, audioExecutionSettings?.Filename, audioOptions)).ConfigureAwait(false)).Value; + + return [new(responseData.Text, targetModel, metadata: GetResponseMetadata(responseData))]; + } + + /// + /// Converts to type. + /// + /// Instance of . + /// Instance of . + private static AudioTranscriptionOptions AudioOptionsFromExecutionSettings(OpenAIAudioToTextExecutionSettings executionSettings) + => new() + { + TimestampGranularities = AudioTimestampGranularities.Default, + Language = executionSettings.Language, + Prompt = executionSettings.Prompt, + Temperature = executionSettings.Temperature, + ResponseFormat = ConvertResponseFormat(executionSettings.ResponseFormat) + }; + + private static AudioTranscriptionFormat? ConvertResponseFormat(string? 
responseFormat) + { + if (responseFormat is null) + { + return null; + } + + return responseFormat switch + { + "json" => AudioTranscriptionFormat.Simple, + "verbose_json" => AudioTranscriptionFormat.Verbose, + "vtt" => AudioTranscriptionFormat.Vtt, + "srt" => AudioTranscriptionFormat.Srt, + _ => throw new NotSupportedException($"The audio transcription format '{responseFormat}' is not supported.") + }; + } + + private static Dictionary GetResponseMetadata(AudioTranscription audioTranscription) + => new(3) + { + [nameof(audioTranscription.Language)] = audioTranscription.Language, + [nameof(audioTranscription.Duration)] = audioTranscription.Duration, + [nameof(audioTranscription.Segments)] = audioTranscription.Segments + }; +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs new file mode 100644 index 000000000000..97a417e13f8a --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs @@ -0,0 +1,1037 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using JsonSchemaMapper; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Diagnostics; +using OpenAI.Chat; +using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. 
+/// +internal partial class ClientCore +{ + /// + /// for JSON schema format for structured outputs. + /// + private static readonly JsonSchemaMapperConfiguration s_jsonSchemaMapperConfiguration = new() + { + IncludeSchemaVersion = false, + IncludeTypeInEnums = true, + TreatNullObliviousAsNonNullable = true, + TransformSchemaNode = OpenAIJsonSchemaTransformer.Transform + }; + + protected const string ModelProvider = "openai"; + protected record ToolCallingConfig(IList? Tools, ChatToolChoice? Choice, bool AutoInvoke, bool AllowAnyRequestedKernelFunction, FunctionChoiceBehaviorOptions? Options); + + /// + /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current + /// asynchronous chain of execution. + /// + /// + /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that + /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, + /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close + /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. + /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in + /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that + /// prompt function could advertize itself as a candidate for auto-invocation. We don't want to outright block that, + /// if that's something a developer has asked to do (e.g. it might be invoked with different arguments than its parent + /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made + /// configurable should need arise. 
+ /// + protected const int MaxInflightAutoInvokes = 128; + + /// Singleton tool used when tool call count drops to 0 but we need to supply tools to keep the service happy. + protected static readonly ChatTool s_nonInvocableFunctionTool = ChatTool.CreateFunctionTool("NonInvocableTool"); + + /// + /// Instance of for metrics. + /// + protected static readonly Meter s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI"); + + /// + /// Instance of to keep track of the number of prompt tokens used. + /// + protected static readonly Counter s_promptTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.prompt", + unit: "{token}", + description: "Number of prompt tokens used"); + + /// + /// Instance of to keep track of the number of completion tokens used. + /// + protected static readonly Counter s_completionTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.completion", + unit: "{token}", + description: "Number of completion tokens used"); + + /// + /// Instance of to keep track of the total number of tokens used. + /// + protected static readonly Counter s_totalTokensCounter = + s_meter.CreateCounter( + name: "semantic_kernel.connectors.openai.tokens.total", + unit: "{token}", + description: "Number of tokens used"); + + protected virtual Dictionary GetChatCompletionMetadata(OpenAIChatCompletion completions) + { + return new Dictionary + { + { nameof(completions.Id), completions.Id }, + { nameof(completions.CreatedAt), completions.CreatedAt }, + { nameof(completions.SystemFingerprint), completions.SystemFingerprint }, + { nameof(completions.Usage), completions.Usage }, + { nameof(completions.Refusal), completions.Refusal }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. 
+ { nameof(completions.FinishReason), completions.FinishReason.ToString() }, + { nameof(completions.ContentTokenLogProbabilities), completions.ContentTokenLogProbabilities }, + }; + } + + protected static Dictionary GetChatCompletionMetadata(StreamingChatCompletionUpdate completionUpdate) + { + return new Dictionary + { + { nameof(completionUpdate.CompletionId), completionUpdate.CompletionId }, + { nameof(completionUpdate.CreatedAt), completionUpdate.CreatedAt }, + { nameof(completionUpdate.SystemFingerprint), completionUpdate.SystemFingerprint }, + { nameof(completionUpdate.RefusalUpdate), completionUpdate.RefusalUpdate }, + { nameof(completionUpdate.Usage), completionUpdate.Usage }, + + // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it. + { nameof(completionUpdate.FinishReason), completionUpdate.FinishReason?.ToString() }, + }; + } + + /// + /// Generate a new chat message + /// + /// Model identifier + /// Chat history + /// Execution settings for the completion API. + /// The containing services, plugins, and other state for use throughout the operation. + /// Async cancellation token + /// Generated chat message in string format + internal async Task> GetChatMessageContentsAsync( + string targetModel, + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + Verify.NotNull(chatHistory); + + if (this.Logger!.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chatHistory), + JsonSerializer.Serialize(executionSettings)); + } + + // Convert the incoming execution settings to OpenAI settings. 
+ OpenAIPromptExecutionSettings chatExecutionSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + for (int requestIndex = 0; ; requestIndex++) + { + var chatForRequest = CreateChatCompletionMessages(chatExecutionSettings, chatHistory); + + var functionCallingConfig = this.GetFunctionCallingConfiguration(kernel, chatExecutionSettings, chatHistory, requestIndex); + + var chatOptions = this.CreateChatCompletionOptions(chatExecutionSettings, chatHistory, functionCallingConfig, kernel); + + // Make the request. + OpenAIChatCompletion? chatCompletion = null; + OpenAIChatMessageContent chatMessageContent; + using (var activity = this.StartCompletionActivity(chatHistory, chatExecutionSettings)) + { + try + { + chatCompletion = (await RunRequestAsync(() => this.Client!.GetChatClient(targetModel).CompleteChatAsync(chatForRequest, chatOptions, cancellationToken)).ConfigureAwait(false)).Value; + + this.LogUsage(chatCompletion.Usage); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + if (chatCompletion != null) + { + // Capture available metadata even if the operation failed. + activity + .SetResponseId(chatCompletion.Id) + .SetPromptTokenUsage(chatCompletion.Usage.InputTokenCount) + .SetCompletionTokenUsage(chatCompletion.Usage.OutputTokenCount); + } + + throw; + } + + chatMessageContent = this.CreateChatMessageContent(chatCompletion, targetModel); + activity?.SetCompletionResponse([chatMessageContent], chatCompletion.Usage.InputTokenCount, chatCompletion.Usage.OutputTokenCount); + } + + // If we don't want to attempt to invoke any functions or there is nothing to call, just return the result. + if (!functionCallingConfig.AutoInvoke || chatCompletion.ToolCalls.Count == 0) + { + return [chatMessageContent]; + } + + // Process function calls by invoking the functions and adding the results to the chat history. 
+ // Each function call will trigger auto-function-invocation filters, which can terminate the process. + // In such cases, we'll return the last message in the chat history. + var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync( + chatMessageContent, + chatHistory, + requestIndex, + (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content), + kernel, + cancellationToken).ConfigureAwait(false); + if (lastMessage != null) + { + return [lastMessage]; + } + + // Process non-function tool calls. + this.ProcessNonFunctionToolCalls(chatCompletion.ToolCalls, chatHistory); + } + } + + internal async IAsyncEnumerable GetStreamingChatMessageContentsAsync( + string targetModel, + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(chatHistory); + + if (this.Logger!.IsEnabled(LogLevel.Trace)) + { + this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}", + JsonSerializer.Serialize(chatHistory), + JsonSerializer.Serialize(executionSettings)); + } + + OpenAIPromptExecutionSettings chatExecutionSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ValidateMaxTokens(chatExecutionSettings.MaxTokens); + + StringBuilder? contentBuilder = null; + Dictionary? toolCallIdsByIndex = null; + Dictionary? functionNamesByIndex = null; + Dictionary? 
functionArgumentBuildersByIndex = null; + + for (int requestIndex = 0; ; requestIndex++) + { + var chatForRequest = CreateChatCompletionMessages(chatExecutionSettings, chatHistory); + + var toolCallingConfig = this.GetFunctionCallingConfiguration(kernel, chatExecutionSettings, chatHistory, requestIndex); + + var chatOptions = this.CreateChatCompletionOptions(chatExecutionSettings, chatHistory, toolCallingConfig, kernel); + + // Reset state + contentBuilder?.Clear(); + toolCallIdsByIndex?.Clear(); + functionNamesByIndex?.Clear(); + functionArgumentBuildersByIndex?.Clear(); + + // Stream the response. + IReadOnlyDictionary? metadata = null; + string? streamedName = null; + ChatMessageRole? streamedRole = default; + ChatFinishReason finishReason = default; + ChatToolCall[]? toolCalls = null; + FunctionCallContent[]? functionCallContents = null; + + using (var activity = this.StartCompletionActivity(chatHistory, chatExecutionSettings)) + { + // Make the request. + AsyncCollectionResult response; + try + { + response = RunRequest(() => this.Client!.GetChatClient(targetModel).CompleteChatStreamingAsync(chatForRequest, chatOptions, cancellationToken)); + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + var responseEnumerator = response.ConfigureAwait(false).GetAsyncEnumerator(); + List? streamedContents = activity is not null ? [] : null; + try + { + while (true) + { + try + { + if (!await responseEnumerator.MoveNextAsync()) + { + break; + } + } + catch (Exception ex) when (activity is not null) + { + activity.SetError(ex); + throw; + } + + StreamingChatCompletionUpdate chatCompletionUpdate = responseEnumerator.Current; + metadata = GetChatCompletionMetadata(chatCompletionUpdate); + streamedRole ??= chatCompletionUpdate.Role; + //streamedName ??= update.AuthorName; + finishReason = chatCompletionUpdate.FinishReason ?? 
default; + + // If we're intending to invoke function calls, we need to consume that function call information. + if (toolCallingConfig.AutoInvoke) + { + try + { + foreach (var contentPart in chatCompletionUpdate.ContentUpdate) + { + if (contentPart.Kind == ChatMessageContentPartKind.Text) + { + (contentBuilder ??= new()).Append(contentPart.Text); + } + } + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatCompletionUpdate.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + } + catch (NullReferenceException) + { + // Temporary workaround for OpenAI SDK Bug here: https://github.com/openai/openai-dotnet/issues/198 + // TODO: Remove this try-catch block once the bug is fixed. + } + } + + var openAIStreamingChatMessageContent = new OpenAIStreamingChatMessageContent(chatCompletionUpdate, 0, targetModel, metadata); + + if (openAIStreamingChatMessageContent.ToolCallUpdates is not null) + { + foreach (var functionCallUpdate in openAIStreamingChatMessageContent.ToolCallUpdates!) + { + // Using the code below to distinguish and skip non - function call related updates. + // The Kind property of updates can't be reliably used because it's only initialized for the first update. + if (string.IsNullOrEmpty(functionCallUpdate.ToolCallId) && + string.IsNullOrEmpty(functionCallUpdate.FunctionName) && + (functionCallUpdate.FunctionArgumentsUpdate is null || functionCallUpdate.FunctionArgumentsUpdate.ToMemory().IsEmpty)) + { + continue; + } + + openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent( + callId: functionCallUpdate.ToolCallId, + name: functionCallUpdate.FunctionName, + arguments: functionCallUpdate.FunctionArgumentsUpdate?.ToString(), + functionCallIndex: functionCallUpdate.Index)); + } + } + streamedContents?.Add(openAIStreamingChatMessageContent); + yield return openAIStreamingChatMessageContent; + } + + // Translate all entries into ChatCompletionsFunctionToolCall instances. 
+ toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls( + ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + // Translate all entries into FunctionCallContent instances for diagnostics purposes. + functionCallContents = this.GetFunctionCallContents(toolCalls).ToArray(); + } + finally + { + activity?.EndStreaming(streamedContents, ModelDiagnostics.IsSensitiveEventsEnabled() ? functionCallContents : null); + await responseEnumerator.DisposeAsync(); + } + } + + // If we don't have a function to invoke, we're done. + // Note that we don't check the FinishReason and instead check whether there are any tool calls, as the service + // may return a FinishReason of "stop" even if there are tool calls to be made, in particular if a required tool + // is specified. + if (!toolCallingConfig.AutoInvoke || + toolCallIdsByIndex is not { Count: > 0 }) + { + yield break; + } + + // Get any response content that was streamed. + string content = contentBuilder?.ToString() ?? string.Empty; + + var chatMessageContent = this.CreateChatMessageContent(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName); + + // Process function calls by invoking the functions and adding the results to the chat history. + // Each function call will trigger auto-function-invocation filters, which can terminate the process. + // In such cases, we'll return the last message in the chat history. + var lastMessage = await this.FunctionCallsProcessor.ProcessFunctionCallsAsync( + chatMessageContent, + chatHistory, + requestIndex, + (FunctionCallContent content) => IsRequestableTool(chatOptions.Tools, content), + kernel, + cancellationToken).ConfigureAwait(false); + if (lastMessage != null) + { + yield return new OpenAIStreamingChatMessageContent(lastMessage.Role, lastMessage.Content); + yield break; + } + + // Process non-function tool calls. 
+ this.ProcessNonFunctionToolCalls(toolCalls, chatHistory); + } + } + + internal async IAsyncEnumerable GetChatAsTextStreamingContentsAsync( + string targetModel, + string prompt, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = this.GetSpecializedExecutionSettings(executionSettings); + ChatHistory chat = CreateNewChat(prompt, chatSettings); + + await foreach (var chatUpdate in this.GetStreamingChatMessageContentsAsync(targetModel, chat, executionSettings, kernel, cancellationToken).ConfigureAwait(false)) + { + yield return new StreamingTextContent(chatUpdate.Content, chatUpdate.ChoiceIndex, chatUpdate.ModelId, chatUpdate, Encoding.UTF8, chatUpdate.Metadata); + } + } + + internal async Task> GetChatAsTextContentsAsync( + string model, + string text, + PromptExecutionSettings? executionSettings, + Kernel? kernel, + CancellationToken cancellationToken = default) + { + OpenAIPromptExecutionSettings chatSettings = this.GetSpecializedExecutionSettings(executionSettings); + + ChatHistory chat = CreateNewChat(text, chatSettings); + return (await this.GetChatMessageContentsAsync(model, chat, chatSettings, kernel, cancellationToken).ConfigureAwait(false)) + .Select(chat => new TextContent(chat.Content, chat.ModelId, chat.Content, Encoding.UTF8, chat.Metadata)) + .ToList(); + } + + /// + /// Returns a specialized execution settings object for the OpenAI chat completion service. + /// + /// Potential execution settings infer specialized. + /// Specialized settings + protected virtual OpenAIPromptExecutionSettings GetSpecializedExecutionSettings(PromptExecutionSettings? executionSettings) + => OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + + /// + /// Start a chat completion activity for a given model. + /// The activity will be tagged with the a set of attributes specified by the semantic conventions. 
+ /// + protected virtual Activity? StartCompletionActivity(ChatHistory chatHistory, PromptExecutionSettings settings) + => ModelDiagnostics.StartCompletionActivity(this.Endpoint, this.ModelId, ModelProvider, chatHistory, settings); + + protected virtual ChatCompletionOptions CreateChatCompletionOptions( + OpenAIPromptExecutionSettings executionSettings, + ChatHistory chatHistory, + ToolCallingConfig toolCallingConfig, + Kernel? kernel) + { + var options = new ChatCompletionOptions + { + MaxOutputTokenCount = executionSettings.MaxTokens, + Temperature = (float?)executionSettings.Temperature, + TopP = (float?)executionSettings.TopP, + FrequencyPenalty = (float?)executionSettings.FrequencyPenalty, + PresencePenalty = (float?)executionSettings.PresencePenalty, +#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. + Seed = executionSettings.Seed, +#pragma warning restore OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed. 
+ EndUserId = executionSettings.User, + TopLogProbabilityCount = executionSettings.TopLogprobs, + IncludeLogProbabilities = executionSettings.Logprobs, + }; + + var responseFormat = GetResponseFormat(executionSettings); + if (responseFormat is not null) + { + options.ResponseFormat = responseFormat; + } + + if (toolCallingConfig.Choice is not null) + { + options.ToolChoice = toolCallingConfig.Choice; + } + + if (toolCallingConfig.Tools is { Count: > 0 } tools) + { + options.Tools.AddRange(tools); + } + + if (executionSettings.TokenSelectionBiases is not null) + { + foreach (var keyValue in executionSettings.TokenSelectionBiases) + { + options.LogitBiases.Add(keyValue.Key, keyValue.Value); + } + } + + if (executionSettings.StopSequences is { Count: > 0 }) + { + foreach (var s in executionSettings.StopSequences) + { + options.StopSequences.Add(s); + } + } + + return options; + } + + /// + /// Retrieves the response format based on the provided settings. + /// + /// Execution settings. + /// Chat response format + protected static ChatResponseFormat? GetResponseFormat(OpenAIPromptExecutionSettings executionSettings) + { + switch (executionSettings.ResponseFormat) + { + case ChatResponseFormat formatObject: + // If the response format is an OpenAI SDK ChatCompletionsResponseFormat, just pass it along. + return formatObject; + case string formatString: + // If the response format is a string, map the ones we know about, and ignore the rest. + switch (formatString) + { + case "json_object": + return ChatResponseFormat.CreateJsonObjectFormat(); + + case "text": + return ChatResponseFormat.CreateTextFormat(); + } + + break; + + case JsonElement formatElement: + // This is a workaround for a type mismatch when deserializing a JSON into an object? type property. + // Handling only string formatElement. + if (formatElement.ValueKind == JsonValueKind.String) + { + string formatString = formatElement.GetString() ?? 
""; + switch (formatString) + { + case "json_object": + return ChatResponseFormat.CreateJsonObjectFormat(); + + case "text": + return ChatResponseFormat.CreateTextFormat(); + } + } + + break; + case Type formatObjectType: + return GetJsonSchemaResponseFormat(formatObjectType); + } + + return null; + } + + /// + /// Gets instance of object for JSON schema format for structured outputs. + /// + private static ChatResponseFormat GetJsonSchemaResponseFormat(Type formatObjectType) + { + var type = formatObjectType.IsGenericType && formatObjectType.GetGenericTypeDefinition() == typeof(Nullable<>) ? Nullable.GetUnderlyingType(formatObjectType)! : formatObjectType; + + var schema = KernelJsonSchemaBuilder.Build(options: null, type, configuration: s_jsonSchemaMapperConfiguration); + var schemaBinaryData = BinaryData.FromString(schema.ToString()); + + return ChatResponseFormat.CreateJsonSchemaFormat(type.Name, schemaBinaryData, jsonSchemaIsStrict: true); + } + + /// Checks if a tool call is for a function that was defined. + private static bool IsRequestableTool(IList tools, FunctionCallContent functionCallContent) + { + for (int i = 0; i < tools.Count; i++) + { + if (tools[i].Kind == ChatToolKind.Function && + string.Equals(tools[i].FunctionName, FunctionName.ToFullyQualifiedName(functionCallContent.FunctionName, functionCallContent.PluginName, OpenAIFunction.NameSeparator), StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + /// + /// Create a new empty chat instance + /// + /// Optional chat instructions for the AI service + /// Execution settings + /// Chat object + private static ChatHistory CreateNewChat(string? text = null, OpenAIPromptExecutionSettings? 
executionSettings = null) + { + var chat = new ChatHistory(); + + // If settings is not provided, create a new chat with the text as the system prompt + AuthorRole textRole = AuthorRole.System; + + if (!string.IsNullOrWhiteSpace(executionSettings?.ChatSystemPrompt)) + { + chat.AddSystemMessage(executionSettings!.ChatSystemPrompt!); + textRole = AuthorRole.User; + } + + if (!string.IsNullOrWhiteSpace(text)) + { + chat.AddMessage(textRole, text!); + } + + return chat; + } + + private static List CreateChatCompletionMessages(OpenAIPromptExecutionSettings executionSettings, ChatHistory chatHistory) + { + List messages = []; + + if (!string.IsNullOrWhiteSpace(executionSettings.ChatSystemPrompt) && !chatHistory.Any(m => m.Role == AuthorRole.System)) + { + messages.Add(new SystemChatMessage(executionSettings.ChatSystemPrompt)); + } + + foreach (var message in chatHistory) + { + messages.AddRange(CreateRequestMessages(message)); + } + + return messages; + } + + private static List CreateRequestMessages(ChatMessageContent message) + { + if (message.Role == AuthorRole.System) + { + return [new SystemChatMessage(message.Content) { ParticipantName = message.AuthorName }]; + } + + if (message.Role == AuthorRole.Tool) + { + // Handling function results represented by the TextContent type. + // Example: new ChatMessageContent(AuthorRole.Tool, content, metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }) + if (message.Metadata?.TryGetValue(OpenAIChatMessageContent.ToolIdProperty, out object? toolId) is true && + toolId?.ToString() is string toolIdString) + { + return [new ToolChatMessage(toolIdString, message.Content)]; + } + + // Handling function results represented by the FunctionResultContent type. + // Example: new ChatMessageContent(AuthorRole.Tool, items: new ChatMessageContentItemCollection { new FunctionResultContent(functionCall, result) }) + List? 
toolMessages = null; + foreach (var item in message.Items) + { + if (item is not FunctionResultContent resultContent) + { + continue; + } + + toolMessages ??= []; + + if (resultContent.Result is Exception ex) + { + toolMessages.Add(new ToolChatMessage(resultContent.CallId, $"Error: Exception while invoking function. {ex.Message}")); + continue; + } + + var stringResult = FunctionCalling.FunctionCallsProcessor.ProcessFunctionResult(resultContent.Result ?? string.Empty); + + toolMessages.Add(new ToolChatMessage(resultContent.CallId, stringResult ?? string.Empty)); + } + + if (toolMessages is not null) + { + return toolMessages; + } + + throw new NotSupportedException("No function result provided in the tool message."); + } + + if (message.Role == AuthorRole.User) + { + if (message.Items is { Count: 1 } && message.Items.FirstOrDefault() is TextContent textContent) + { + return [new UserChatMessage(textContent.Text) { ParticipantName = message.AuthorName }]; + } + + return + [ + new UserChatMessage(message.Items.Select(static (KernelContent item) => item switch + { + TextContent textContent => ChatMessageContentPart.CreateTextPart(textContent.Text), + ImageContent imageContent => GetImageContentItem(imageContent), + _ => throw new NotSupportedException($"Unsupported chat message content type '{item.GetType()}'.") + })) + { ParticipantName = message.AuthorName } + ]; + } + + if (message.Role == AuthorRole.Assistant) + { + var toolCalls = new List(); + + // Handling function calls supplied via either: + // ChatCompletionsToolCall.ToolCalls collection items or + // ChatMessageContent.Metadata collection item with 'ChatResponseMessage.FunctionToolCalls' key. + IEnumerable? tools = (message as OpenAIChatMessageContent)?.ToolCalls; + if (tools is null && message.Metadata?.TryGetValue(OpenAIChatMessageContent.FunctionToolCallsProperty, out object? 
toolCallsObject) is true) + { + tools = toolCallsObject as IEnumerable; + if (tools is null && toolCallsObject is JsonElement { ValueKind: JsonValueKind.Array } array) + { + int length = array.GetArrayLength(); + var ftcs = new List(length); + for (int i = 0; i < length; i++) + { + JsonElement e = array[i]; + if (e.TryGetProperty("Id", out JsonElement id) && + e.TryGetProperty("Name", out JsonElement name) && + e.TryGetProperty("Arguments", out JsonElement arguments) && + id.ValueKind == JsonValueKind.String && + name.ValueKind == JsonValueKind.String && + arguments.ValueKind == JsonValueKind.String) + { + ftcs.Add(ChatToolCall.CreateFunctionToolCall(id.GetString()!, name.GetString()!, BinaryData.FromString(arguments.GetString()!))); + } + } + tools = ftcs; + } + } + + if (tools is not null) + { + toolCalls.AddRange(tools); + } + + // Handling function calls supplied via ChatMessageContent.Items collection elements of the FunctionCallContent type. + HashSet? functionCallIds = null; + foreach (var item in message.Items) + { + if (item is not FunctionCallContent callRequest) + { + continue; + } + + functionCallIds ??= new HashSet(toolCalls.Select(t => t.Id)); + + if (callRequest.Id is null || functionCallIds.Contains(callRequest.Id)) + { + continue; + } + + var argument = JsonSerializer.Serialize(callRequest.Arguments); + + toolCalls.Add(ChatToolCall.CreateFunctionToolCall(callRequest.Id, FunctionName.ToFullyQualifiedName(callRequest.FunctionName, callRequest.PluginName, OpenAIFunction.NameSeparator), BinaryData.FromString(argument ?? string.Empty))); + } + + // This check is necessary to prevent an exception that will be thrown if the toolCalls collection is empty. 
+ // HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' + if (toolCalls.Count == 0) + { + return [new AssistantChatMessage(message.Content) { ParticipantName = message.AuthorName }]; + } + + var assistantMessage = new AssistantChatMessage(toolCalls) { ParticipantName = message.AuthorName }; + if (message.Content is { } content) + { + assistantMessage.Content.Add(content); + } + + return [assistantMessage]; + } + + throw new NotSupportedException($"Role {message.Role} is not supported."); + } + + private static ChatMessageContentPart GetImageContentItem(ImageContent imageContent) + { + if (imageContent.Data is { IsEmpty: false } data) + { + return ChatMessageContentPart.CreateImagePart(BinaryData.FromBytes(data), imageContent.MimeType); + } + + if (imageContent.Uri is not null) + { + return ChatMessageContentPart.CreateImagePart(imageContent.Uri); + } + + throw new ArgumentException($"{nameof(ImageContent)} must have either Data or a Uri."); + } + + private OpenAIChatMessageContent CreateChatMessageContent(OpenAIChatCompletion completion, string targetModel) + { + var message = new OpenAIChatMessageContent(completion, targetModel, this.GetChatCompletionMetadata(completion)); + + message.Items.AddRange(this.GetFunctionCallContents(completion.ToolCalls)); + + return message; + } + + private OpenAIChatMessageContent CreateChatMessageContent(ChatMessageRole chatRole, string content, ChatToolCall[] toolCalls, FunctionCallContent[]? functionCalls, IReadOnlyDictionary? metadata, string? 
authorName) + { + var message = new OpenAIChatMessageContent(chatRole, content, this.ModelId, toolCalls, metadata) + { + AuthorName = authorName, + }; + + if (functionCalls is not null) + { + message.Items.AddRange(functionCalls); + } + + return message; + } + + private List GetFunctionCallContents(IEnumerable toolCalls) + { + List result = []; + + foreach (var toolCall in toolCalls) + { + // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property. + // This allows consumers to work with functions in an LLM-agnostic way. + if (toolCall.Kind == ChatToolCallKind.Function) + { + Exception? exception = null; + KernelArguments? arguments = null; + try + { + arguments = JsonSerializer.Deserialize(toolCall.FunctionArguments); + if (arguments is not null) + { + // Iterate over copy of the names to avoid mutating the dictionary while enumerating it + var names = arguments.Names.ToArray(); + foreach (var name in names) + { + arguments[name] = arguments[name]?.ToString(); + } + } + } + catch (JsonException ex) + { + exception = new KernelException("Error: Function call arguments were invalid JSON.", ex); + + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", toolCall.FunctionName, toolCall.Id); + } + } + + var functionName = FunctionName.Parse(toolCall.FunctionName, OpenAIFunction.NameSeparator); + + var functionCallContent = new FunctionCallContent( + functionName: functionName.Name, + pluginName: functionName.PluginName, + id: toolCall.Id, + arguments: arguments) + { + InnerContent = toolCall, + Exception = exception + }; + + result.Add(functionCallContent); + } + } + + return result; + } + + private static void ValidateMaxTokens(int? 
maxTokens) + { + if (maxTokens.HasValue && maxTokens < 1) + { + throw new ArgumentException($"MaxTokens {maxTokens} is not valid, the value must be greater than zero"); + } + } + + /// + /// Captures usage details, including token information. + /// + /// Instance of with token usage details. + private void LogUsage(ChatTokenUsage usage) + { + if (usage is null) + { + this.Logger!.LogDebug("Token usage information unavailable."); + return; + } + + if (this.Logger!.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation( + "Prompt tokens: {InputTokenCount}. Completion tokens: {OutputTokenCount}. Total tokens: {TotalTokenCount}.", + usage.InputTokenCount, usage.OutputTokenCount, usage.TotalTokenCount); + } + + s_promptTokensCounter.Add(usage.InputTokenCount); + s_completionTokensCounter.Add(usage.OutputTokenCount); + s_totalTokensCounter.Add(usage.TotalTokenCount); + } + + private ToolCallingConfig GetFunctionCallingConfiguration(Kernel? kernel, OpenAIPromptExecutionSettings executionSettings, ChatHistory chatHistory, int requestIndex) + { + // If neither behavior is specified, we just return default configuration with no tool and no choice + if (executionSettings.FunctionChoiceBehavior is null && executionSettings.ToolCallBehavior is null) + { + return new ToolCallingConfig(Tools: null, Choice: null, AutoInvoke: false, AllowAnyRequestedKernelFunction: false, Options: null); + } + + // If both behaviors are specified, we can't handle that. + if (executionSettings.FunctionChoiceBehavior is not null && executionSettings.ToolCallBehavior is not null) + { + throw new ArgumentException($"{nameof(executionSettings.ToolCallBehavior)} and {nameof(executionSettings.FunctionChoiceBehavior)} cannot be used together."); + } + + IList? tools = null; + ChatToolChoice? choice = null; + bool autoInvoke = false; + bool allowAnyRequestedKernelFunction = false; + FunctionChoiceBehaviorOptions? 
options = null; + + // Handling new tool behavior represented by `PromptExecutionSettings.FunctionChoiceBehavior` property. + if (executionSettings.FunctionChoiceBehavior is { } functionChoiceBehavior) + { + (tools, choice, autoInvoke, options) = this.ConfigureFunctionCalling(kernel, requestIndex, functionChoiceBehavior, chatHistory); + } + // Handling old-style tool call behavior represented by `OpenAIPromptExecutionSettings.ToolCallBehavior` property. + else if (executionSettings.ToolCallBehavior is { } toolCallBehavior) + { + (tools, choice, autoInvoke, int maximumAutoInvokeAttempts, allowAnyRequestedKernelFunction) = this.ConfigureFunctionCalling(kernel, requestIndex, toolCallBehavior); + + // Disable auto invocation if we've exceeded the allowed limit. + if (requestIndex >= maximumAutoInvokeAttempts) + { + autoInvoke = false; + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", maximumAutoInvokeAttempts); + } + } + // Disable auto invocation if we've exceeded the allowed limit of in-flight auto-invokes. + else if (FunctionCalling.FunctionCallsProcessor.s_inflightAutoInvokes.Value >= MaxInflightAutoInvokes) + { + autoInvoke = false; + } + } + + return new ToolCallingConfig( + Tools: tools ?? [s_nonInvocableFunctionTool], + Choice: choice ?? ChatToolChoice.CreateNoneChoice(), + AutoInvoke: autoInvoke, + AllowAnyRequestedKernelFunction: allowAnyRequestedKernelFunction, + Options: options); + } + + private (IList? Tools, ChatToolChoice? Choice, bool AutoInvoke, int MaximumAutoInvokeAttempts, bool AllowAnyRequestedKernelFunction) ConfigureFunctionCalling(Kernel? kernel, int requestIndex, ToolCallBehavior toolCallBehavior) + { + IList? tools = null; + ChatToolChoice? 
choice = null; + bool autoInvoke = kernel is not null && toolCallBehavior.MaximumAutoInvokeAttempts > 0; + bool allowAnyRequestedKernelFunction = toolCallBehavior.AllowAnyRequestedKernelFunction; + int maximumAutoInvokeAttempts = toolCallBehavior.MaximumAutoInvokeAttempts; + + if (requestIndex >= toolCallBehavior.MaximumUseAttempts) + { + // Don't add any tools as we've reached the maximum attempts limit. + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger.LogDebug("Maximum use ({MaximumUse}) reached.", toolCallBehavior.MaximumUseAttempts); + } + } + else + { + (tools, choice) = toolCallBehavior.ConfigureOptions(kernel); + } + + return new(tools, choice, autoInvoke, maximumAutoInvokeAttempts, allowAnyRequestedKernelFunction); + } + + private (IList? Tools, ChatToolChoice? Choice, bool AutoInvoke, FunctionChoiceBehaviorOptions? Options) ConfigureFunctionCalling(Kernel? kernel, int requestIndex, FunctionChoiceBehavior functionChoiceBehavior, ChatHistory chatHistory) + { + FunctionChoiceBehaviorConfiguration? config = this.FunctionCallsProcessor.GetConfiguration(functionChoiceBehavior, chatHistory, requestIndex, kernel); + + IList? tools = null; + ChatToolChoice? toolChoice = null; + bool autoInvoke = config?.AutoInvoke ?? 
false; + + if (config?.Functions is { Count: > 0 } functions) + { + if (config.Choice == FunctionChoice.Auto) + { + toolChoice = ChatToolChoice.CreateAutoChoice(); + } + else if (config.Choice == FunctionChoice.Required) + { + toolChoice = ChatToolChoice.CreateRequiredChoice(); + } + else if (config.Choice == FunctionChoice.None) + { + toolChoice = ChatToolChoice.CreateNoneChoice(); + } + else + { + throw new NotSupportedException($"Unsupported function choice '{config.Choice}'."); + } + + tools = []; + + foreach (var function in functions) + { + tools.Add(function.Metadata.ToOpenAIFunction().ToFunctionDefinition()); + } + } + + return new(tools, toolChoice, autoInvoke, config?.Options); + } + + /// + /// Processes non-function tool calls. + /// + /// All tool calls requested by AI model. + /// The chat history. + private void ProcessNonFunctionToolCalls(IEnumerable toolCalls, ChatHistory chatHistory) + { + var nonFunctionToolCalls = toolCalls.Where(toolCall => toolCall.Kind != ChatToolCallKind.Function); + + const string ErrorMessage = "Error: Tool call was not a function call."; + + foreach (var toolCall in nonFunctionToolCalls) + { + if (this.Logger!.IsEnabled(LogLevel.Debug)) + { + this.Logger!.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, ErrorMessage); + } + + // We currently only know about function tool calls. If it's anything else, we'll respond with an error. + var message = new ChatMessageContent(role: AuthorRole.Tool, content: ErrorMessage, metadata: new Dictionary { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } }); + + chatHistory.Add(message); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs new file mode 100644 index 000000000000..0d698646fe80 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.Embeddings.cs @@ -0,0 +1,58 @@ +๏ปฟ// Copyright (c) Microsoft. 
All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Embeddings; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an embedding from the given . + /// + /// Target model to generate embeddings from + /// List of strings to generate embeddings for + /// The containing services, plugins, and other state for use throughout the operation. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The to monitor for cancellation requests. The default is . + /// List of embeddings + internal async Task>> GetEmbeddingsAsync( + string targetModel, + IList data, + Kernel? kernel, + int? dimensions, + CancellationToken cancellationToken) + { + var result = new List>(data.Count); + + if (data.Count > 0) + { + var embeddingsOptions = new EmbeddingGenerationOptions() + { + Dimensions = dimensions + }; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetEmbeddingClient(targetModel).GenerateEmbeddingsAsync(data, embeddingsOptions, cancellationToken)).ConfigureAwait(false); + var embeddings = response.Value; + + if (embeddings.Count != data.Count) + { + throw new KernelException($"Expected {data.Count} text embedding(s), but received {embeddings.Count}"); + } + + for (var i = 0; i < embeddings.Count; i++) + { + result.Add(embeddings[i].ToFloats()); + } + } + + return result; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs new file mode 100644 index 000000000000..0b5af67aaea7 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs @@ -0,0 
+1,74 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Audio; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// Prompt to generate the image + /// Text to Audio execution settings for the prompt + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task> GetAudioContentsAsync( + string targetModel, + string prompt, + PromptExecutionSettings? executionSettings, + CancellationToken cancellationToken) + { + Verify.NotNullOrWhiteSpace(prompt); + + OpenAITextToAudioExecutionSettings audioExecutionSettings = OpenAITextToAudioExecutionSettings.FromExecutionSettings(executionSettings); + + var (responseFormat, mimeType) = GetGeneratedSpeechFormatAndMimeType(audioExecutionSettings.ResponseFormat); + + SpeechGenerationOptions options = new() + { + ResponseFormat = responseFormat, + SpeedRatio = audioExecutionSettings.Speed, + }; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).GenerateSpeechAsync(prompt, GetGeneratedSpeechVoice(audioExecutionSettings?.Voice), options, cancellationToken)).ConfigureAwait(false); + + return [new AudioContent(response.Value.ToArray(), mimeType)]; + } + + private static GeneratedSpeechVoice GetGeneratedSpeechVoice(string? 
voice) + => voice?.ToUpperInvariant() switch + { + "ALLOY" => GeneratedSpeechVoice.Alloy, + "ECHO" => GeneratedSpeechVoice.Echo, + "FABLE" => GeneratedSpeechVoice.Fable, + "ONYX" => GeneratedSpeechVoice.Onyx, + "NOVA" => GeneratedSpeechVoice.Nova, + "SHIMMER" => GeneratedSpeechVoice.Shimmer, + _ => throw new NotSupportedException($"The voice '{voice}' is not supported."), + }; + + private static (GeneratedSpeechFormat? Format, string? MimeType) GetGeneratedSpeechFormatAndMimeType(string? format) + { + switch (format?.ToUpperInvariant()) + { + case "WAV": return (GeneratedSpeechFormat.Wav, "audio/wav"); + case "MP3": return (GeneratedSpeechFormat.Mp3, "audio/mpeg"); + case "OPUS": return (GeneratedSpeechFormat.Opus, "audio/opus"); + case "FLAC": return (GeneratedSpeechFormat.Flac, "audio/flac"); + case "AAC": return (GeneratedSpeechFormat.Aac, "audio/aac"); + case "PCM": return (GeneratedSpeechFormat.Pcm, "audio/l16"); + case null: return (null, null); + default: throw new NotSupportedException($"The format '{format}' is not supported."); + } + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs new file mode 100644 index 000000000000..7d09f0805bb1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToImage.cs @@ -0,0 +1,159 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using OpenAI.Images; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// Generates an image with the provided configuration. 
+ /// + /// Model identifier + /// Prompt to generate the image + /// Width of the image + /// Height of the image + /// The to monitor for cancellation requests. The default is . + /// Url of the generated image + internal async Task GenerateImageAsync( + string? targetModel, + string prompt, + int width, + int height, + CancellationToken cancellationToken) + { + Verify.NotNullOrWhiteSpace(prompt); + + var size = new GeneratedImageSize(width, height); + + var imageOptions = new ImageGenerationOptions() + { + Size = size, + ResponseFormat = GeneratedImageFormat.Uri + }; + + // The model is not required by the OpenAI API and defaults to the DALL-E 2 server-side - https://platform.openai.com/docs/api-reference/images/create#images-create-model. + // However, considering that the model is required by the OpenAI SDK and the ModelId property is optional, it defaults to DALL-E 2 in the line below. + targetModel = string.IsNullOrEmpty(targetModel) ? "dall-e-2" : targetModel!; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetImageClient(targetModel).GenerateImageAsync(prompt, imageOptions, cancellationToken)).ConfigureAwait(false); + var generatedImage = response.Value; + + return generatedImage.ImageUri?.ToString() ?? throw new KernelException("The generated image is not in url format"); + } + + /// + /// Generates an image with the provided configuration. + /// + /// Model identifier + /// The input text content to generate the image + /// Execution settings for the image generation + /// Kernel instance + /// Cancellation token + /// List of image generated contents + internal async Task> GetImageContentsAsync( + string targetModel, + TextContent input, + PromptExecutionSettings? executionSettings = null, + Kernel? 
kernel = null, + CancellationToken cancellationToken = default) + { + // Ensure the input is valid + Verify.NotNull(input); + + // Convert the generic execution settings to OpenAI-specific settings + var imageSettings = OpenAITextToImageExecutionSettings.FromExecutionSettings(executionSettings); + + var imageGenerationOptions = new ImageGenerationOptions() + { + Size = GetGeneratedImageSize(imageSettings.Size), + ResponseFormat = GetResponseFormat(imageSettings.ResponseFormat), + Style = GetGeneratedImageStyle(imageSettings.Style), + Quality = GetGeneratedImageQuality(imageSettings.Quality), + EndUserId = imageSettings.EndUserId, + }; + + ClientResult response = await RunRequestAsync(() => this.Client!.GetImageClient(targetModel).GenerateImageAsync(input.Text, imageGenerationOptions, cancellationToken)).ConfigureAwait(false); + var generatedImage = response.Value; + + List result = []; + if (generatedImage.ImageUri is not null) + { + result.Add(new ImageContent(uri: generatedImage.ImageUri) { InnerContent = generatedImage }); + } + else + { + result.Add(new ImageContent(generatedImage.ImageBytes, "image/png") { InnerContent = generatedImage }); + } + + return result; + } + + private static GeneratedImageSize? GetGeneratedImageSize((int Width, int Height)? size) + => size is null + ? null + : new GeneratedImageSize(size.Value.Width, size.Value.Height); + + private static GeneratedImageQuality? GetGeneratedImageQuality(string? quality) + { + if (quality is null) + { + return null; + } + + return quality.ToUpperInvariant() switch + { + "STANDARD" => GeneratedImageQuality.Standard, + "HIGH" or "HD" => GeneratedImageQuality.High, + _ => throw new NotSupportedException($"The provided quality '{quality}' is not supported.") + }; + } + + private static GeneratedImageStyle? GetGeneratedImageStyle(string? 
style) + { + if (style is null) + { + return null; + } + + return style.ToUpperInvariant() switch + { + "VIVID" => GeneratedImageStyle.Vivid, + "NATURAL" => GeneratedImageStyle.Natural, + _ => throw new NotSupportedException($"The provided style '{style}' is not supported.") + }; + } + + private static GeneratedImageFormat? GetResponseFormat(object? responseFormat) + { + if (responseFormat is null) + { + return null; + } + + if (responseFormat is GeneratedImageFormat format) + { + return format; + } + + if (responseFormat is string formatString) + { + return formatString.ToUpperInvariant() switch + { + "URI" or "URL" => GeneratedImageFormat.Uri, + "BYTES" or "B64_JSON" => GeneratedImageFormat.Bytes, + _ => throw new NotSupportedException($"The provided response format '{formatString}' is not supported.") + }; + } + + throw new NotSupportedException($"The provided response format type '{responseFormat.GetType()}' is not supported."); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs new file mode 100644 index 000000000000..a295c7876e69 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs @@ -0,0 +1,266 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Net.Http; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +#pragma warning disable IDE0005 // Using directive is unnecessary +using Microsoft.SemanticKernel.Connectors.FunctionCalling; +#pragma warning restore IDE0005 // Using directive is unnecessary +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Services; +using OpenAI; + +#pragma warning disable CA2208 // Instantiate argument exceptions correctly + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Base class for AI clients that provides common functionality for interacting with OpenAI services. +/// +internal partial class ClientCore +{ + /// + /// White space constant. + /// + private const string SingleSpace = " "; + + /// + /// Gets the attribute name used to store the organization in the dictionary. + /// + internal const string OrganizationKey = "Organization"; + + /// + /// Default OpenAI API endpoint. + /// + private const string OpenAIV1Endpoint = "https://api.openai.com/v1"; + + /// + /// Identifier of the default model to use + /// + protected internal string ModelId { get; init; } = string.Empty; + + /// + /// Non-default endpoint for OpenAI API. + /// + protected internal Uri? Endpoint { get; init; } + + /// + /// Logger instance + /// + protected internal ILogger? Logger { get; init; } + + /// + /// OpenAI Client + /// + protected internal OpenAIClient? Client { get; set; } + + /// + /// Storage for AI service attributes. + /// + internal Dictionary Attributes { get; } = []; + + /// + /// The function calls processor. + /// + protected FunctionCallsProcessor FunctionCallsProcessor { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// Model name. + /// OpenAI API Key. 
+ /// OpenAI Organization Id (usually optional). + /// OpenAI compatible API endpoint. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + internal ClientCore( + string? modelId = null, + string? apiKey = null, + string? organizationId = null, + Uri? endpoint = null, + HttpClient? httpClient = null, + ILogger? logger = null) + { + this.FunctionCallsProcessor = new FunctionCallsProcessor(this.Logger); + + // Empty constructor will be used when inherited by a specialized Client. + if (modelId is null + && apiKey is null + && organizationId is null + && endpoint is null + && httpClient is null + && logger is null) + { + return; + } + + if (!string.IsNullOrWhiteSpace(modelId)) + { + this.ModelId = modelId!; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + this.Logger = logger ?? NullLogger.Instance; + + // Accepts the endpoint if provided, otherwise uses the default OpenAI endpoint. + this.Endpoint = endpoint ?? httpClient?.BaseAddress; + if (this.Endpoint is null) + { + Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided. + this.Endpoint = new Uri(OpenAIV1Endpoint); + } + else if (string.IsNullOrEmpty(apiKey)) + { + // Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key. + apiKey = SingleSpace; + } + + this.AddAttribute(AIServiceExtensions.EndpointKey, this.Endpoint.ToString()); + + var options = GetOpenAIClientOptions(httpClient, this.Endpoint); + if (!string.IsNullOrWhiteSpace(organizationId)) + { + options.AddPolicy(CreateRequestHeaderPolicy("OpenAI-Organization", organizationId!), PipelinePosition.PerCall); + + this.AddAttribute(ClientCore.OrganizationKey, organizationId); + } + + this.Client = new OpenAIClient(new ApiKeyCredential(apiKey!), options); + } + + /// + /// Initializes a new instance of the class using the specified OpenAIClient. 
+ /// Note: instances created this way might not have the default diagnostics settings, + /// it's up to the caller to configure the client. + /// + /// OpenAI model Id + /// Custom . + /// The to use for logging. If null, no logging will be performed. + internal ClientCore( + string? modelId, + OpenAIClient openAIClient, + ILogger? logger = null) + { + // Model Id may not be required when other services. i.e: File Service. + if (modelId is not null) + { + this.ModelId = modelId; + this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + } + + Verify.NotNull(openAIClient); + + this.Logger = logger ?? NullLogger.Instance; + this.Client = openAIClient; + this.FunctionCallsProcessor = new FunctionCallsProcessor(this.Logger); + } + + /// + /// Logs OpenAI action details. + /// + /// Caller member name. Populated automatically by runtime. + internal void LogActionDetails([CallerMemberName] string? callerMemberName = default) + { + if (this.Logger!.IsEnabled(LogLevel.Information)) + { + this.Logger.LogInformation("Action: {Action}. OpenAI Model ID: {ModelId}.", callerMemberName, this.ModelId); + } + } + + /// + /// Allows adding attributes to the client. + /// + /// Attribute key. + /// Attribute value. + internal void AddAttribute(string key, string? value) + { + if (!string.IsNullOrEmpty(value)) + { + this.Attributes.Add(key, value); + } + } + + /// Gets options to use for an OpenAIClient + /// Custom for HTTP requests. + /// Endpoint for the OpenAI API. + /// An instance of . + private static OpenAIClientOptions GetOpenAIClientOptions(HttpClient? httpClient, Uri? 
endpoint) + { + OpenAIClientOptions options = new() + { + UserAgentApplicationId = HttpHeaderConstant.Values.UserAgent, + Endpoint = endpoint + }; + + options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(ClientCore))), PipelinePosition.PerCall); + + if (httpClient is not null) + { + options.Transport = new HttpClientPipelineTransport(httpClient); + options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided. + options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout + } + + return options; + } + + /// + /// Gets the model identifier to use for the client. + /// + protected virtual string GetClientModelId() + => this.ModelId; + + /// + /// Invokes the specified request and handles exceptions. + /// + /// Type of the response. + /// Request to invoke. + /// Returns the response. + protected static async Task RunRequestAsync(Func> request) + { + try + { + return await request.Invoke().ConfigureAwait(false); + } + catch (ClientResultException e) + { + throw e.ToHttpOperationException(); + } + } + + /// + /// Invokes the specified request and handles exceptions. + /// + /// Type of the response. + /// Request to invoke. + /// Returns the response. + protected static T RunRequest(Func request) + { + try + { + return request.Invoke(); + } + catch (ClientResultException e) + { + throw e.ToHttpOperationException(); + } + } + + protected static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue) + { + return new GenericActionPipelinePolicy((message) => + { + if (message?.Request?.Headers?.TryGetValue(headerName, out string? 
_) == false) + { + message.Request.Headers.Set(headerName, headerValue); + } + }); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs similarity index 65% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs index d91f8e45fc40..3015fa09604f 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIChatMessageContent.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIChatMessageContent.cs @@ -2,8 +2,9 @@ using System.Collections.Generic; using System.Linq; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Chat; +using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -13,28 +14,28 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public sealed class OpenAIChatMessageContent : ChatMessageContent { /// - /// Gets the metadata key for the name property. + /// Gets the metadata key for the tool id. /// - public static string ToolIdProperty => $"{nameof(ChatCompletionsToolCall)}.{nameof(ChatCompletionsToolCall.Id)}"; + public static string ToolIdProperty => "ChatCompletionsToolCall.Id"; /// - /// Gets the metadata key for the list of . + /// Gets the metadata key for the list of . /// - internal static string FunctionToolCallsProperty => $"{nameof(ChatResponseMessage)}.FunctionToolCalls"; + internal static string FunctionToolCallsProperty => "ChatResponseMessage.FunctionToolCalls"; /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(ChatResponseMessage chatMessage, string modelId, IReadOnlyDictionary? 
metadata = null) - : base(new AuthorRole(chatMessage.Role.ToString()), chatMessage.Content, modelId, chatMessage, System.Text.Encoding.UTF8, CreateMetadataDictionary(chatMessage.ToolCalls, metadata)) + internal OpenAIChatMessageContent(OpenAIChatCompletion completion, string modelId, IReadOnlyDictionary? metadata = null) + : base(new AuthorRole(completion.Role.ToString()), CreateContentItems(completion.Content), modelId, completion, System.Text.Encoding.UTF8, CreateMetadataDictionary(completion.ToolCalls, metadata)) { - this.ToolCalls = chatMessage.ToolCalls; + this.ToolCalls = completion.ToolCalls; } /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(ChatRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + internal OpenAIChatMessageContent(ChatMessageRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) : base(new AuthorRole(role.ToString()), content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) { this.ToolCalls = toolCalls; @@ -43,16 +44,32 @@ internal OpenAIChatMessageContent(ChatRole role, string? content, string modelId /// /// Initializes a new instance of the class. /// - internal OpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) + internal OpenAIChatMessageContent(AuthorRole role, string? content, string modelId, IReadOnlyList toolCalls, IReadOnlyDictionary? metadata = null) : base(role, content, modelId, content, System.Text.Encoding.UTF8, CreateMetadataDictionary(toolCalls, metadata)) { this.ToolCalls = toolCalls; } + private static ChatMessageContentItemCollection CreateContentItems(IReadOnlyList contentUpdate) + { + ChatMessageContentItemCollection collection = []; + + foreach (var part in contentUpdate) + { + // We only support text content for now. 
+ if (part.Kind == ChatMessageContentPartKind.Text) + { + collection.Add(new TextContent(part.Text)); + } + } + + return collection; + } + /// /// A list of the tools called by the model. /// - public IReadOnlyList ToolCalls { get; } + public IReadOnlyList ToolCalls { get; } /// /// Retrieve the resulting function from the chat result. @@ -64,9 +81,9 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() foreach (var toolCall in this.ToolCalls) { - if (toolCall is ChatCompletionsFunctionToolCall functionToolCall) + if (toolCall.Kind == ChatToolCallKind.Function) { - (functionToolCallList ??= []).Add(new OpenAIFunctionToolCall(functionToolCall)); + (functionToolCallList ??= []).Add(new OpenAIFunctionToolCall(toolCall)); } } @@ -79,7 +96,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() } private static IReadOnlyDictionary? CreateMetadataDictionary( - IReadOnlyList toolCalls, + IReadOnlyList toolCalls, IReadOnlyDictionary? original) { // We only need to augment the metadata if there are any tool calls. @@ -107,7 +124,7 @@ public IReadOnlyList GetOpenAIFunctionToolCalls() } // Add the additional entry. 
- newDictionary.Add(FunctionToolCallsProperty, toolCalls.OfType().ToList()); + newDictionary.Add(FunctionToolCallsProperty, toolCalls.Where(ctc => ctc.Kind == ChatToolCallKind.Function).ToList()); return newDictionary; } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs index b51faa59c359..512277245fec 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunction.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunction.cs @@ -2,14 +2,10 @@ using System; using System.Collections.Generic; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; -// NOTE: Since this space is evolving rapidly, in order to reduce the risk of needing to take breaking -// changes as OpenAI's APIs evolve, these types are not externally constructible. In the future, once -// things stabilize, and if need demonstrates, we could choose to expose those constructors. - /// /// Represents a function parameter that can be passed to an OpenAI function tool call. /// @@ -127,11 +123,11 @@ internal OpenAIFunction( public OpenAIFunctionReturnParameter? ReturnParameter { get; } /// - /// Converts the representation to the Azure SDK's - /// representation. + /// Converts the representation to the OpenAI SDK's + /// representation. /// - /// A containing all the function information. - public FunctionDefinition ToFunctionDefinition() + /// A containing all the function information. 
+ public ChatTool ToFunctionDefinition() { BinaryData resultParameters = s_zeroFunctionParametersSchema; @@ -159,12 +155,12 @@ public FunctionDefinition ToFunctionDefinition() }); } - return new FunctionDefinition - { - Name = this.FullyQualifiedName, - Description = this.Description, - Parameters = resultParameters, - }; + return ChatTool.CreateFunctionTool + ( + functionName: this.FullyQualifiedName, + functionDescription: this.Description, + functionParameters: resultParameters + ); } /// Gets a for a typeless parameter with the specified description, defaulting to typeof(string) diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs similarity index 75% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs index af4688e06df1..e6b4be602b39 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIFunctionToolCall.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIFunctionToolCall.cs @@ -5,7 +5,7 @@ using System.Diagnostics; using System.Text; using System.Text.Json; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -16,15 +16,15 @@ public sealed class OpenAIFunctionToolCall { private string? _fullyQualifiedFunctionName; - /// Initialize the from a . - internal OpenAIFunctionToolCall(ChatCompletionsFunctionToolCall functionToolCall) + /// Initialize the from a . + internal OpenAIFunctionToolCall(ChatToolCall functionToolCall) { Verify.NotNull(functionToolCall); - Verify.NotNull(functionToolCall.Name); + Verify.NotNull(functionToolCall.FunctionName); - string fullyQualifiedFunctionName = functionToolCall.Name; + string fullyQualifiedFunctionName = functionToolCall.FunctionName; string functionName = fullyQualifiedFunctionName; - string? 
arguments = functionToolCall.Arguments; + string? arguments = functionToolCall.FunctionArguments?.ToString(); string? pluginName = null; int separatorPos = fullyQualifiedFunctionName.IndexOf(OpenAIFunction.NameSeparator, StringComparison.Ordinal); @@ -89,65 +89,65 @@ public override string ToString() /// /// Tracks tooling updates from streaming responses. /// - /// The tool call update to incorporate. + /// The tool call updates to incorporate. /// Lazily-initialized dictionary mapping indices to IDs. /// Lazily-initialized dictionary mapping indices to names. /// Lazily-initialized dictionary mapping indices to arguments. internal static void TrackStreamingToolingUpdate( - StreamingToolCallUpdate? update, + IReadOnlyList? updates, ref Dictionary? toolCallIdsByIndex, ref Dictionary? functionNamesByIndex, ref Dictionary? functionArgumentBuildersByIndex) { - if (update is null) + if (updates is null) { // Nothing to track. return; } - // If we have an ID, ensure the index is being tracked. Even if it's not a function update, - // we want to keep track of it so we can send back an error. - if (update.Id is string id) + foreach (var update in updates) { - (toolCallIdsByIndex ??= [])[update.ToolCallIndex] = id; - } + // If we have an ID, ensure the index is being tracked. Even if it's not a function update, + // we want to keep track of it so we can send back an error. + if (update.ToolCallId is string id) + { + (toolCallIdsByIndex ??= [])[update.Index] = id; + } - if (update is StreamingFunctionToolCallUpdate ftc) - { // Ensure we're tracking the function's name. - if (ftc.Name is string name) + if (update.FunctionName is string name) { - (functionNamesByIndex ??= [])[ftc.ToolCallIndex] = name; + (functionNamesByIndex ??= [])[update.Index] = name; } // Ensure we're tracking the function's arguments. 
- if (ftc.ArgumentsUpdate is string argumentsUpdate) + if (update.FunctionArgumentsUpdate is not null) { - if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(ftc.ToolCallIndex, out StringBuilder? arguments)) + if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(update.Index, out StringBuilder? arguments)) { - functionArgumentBuildersByIndex[ftc.ToolCallIndex] = arguments = new(); + functionArgumentBuildersByIndex[update.Index] = arguments = new(); } - arguments.Append(argumentsUpdate); + arguments.Append(update.FunctionArgumentsUpdate.ToString()); } } } /// - /// Converts the data built up by into an array of s. + /// Converts the data built up by into an array of s. /// /// Dictionary mapping indices to IDs. /// Dictionary mapping indices to names. /// Dictionary mapping indices to arguments. - internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls( + internal static ChatToolCall[] ConvertToolCallUpdatesToFunctionToolCalls( ref Dictionary? toolCallIdsByIndex, ref Dictionary? functionNamesByIndex, ref Dictionary? functionArgumentBuildersByIndex) { - ChatCompletionsFunctionToolCall[] toolCalls = []; + ChatToolCall[] toolCalls = []; if (toolCallIdsByIndex is { Count: > 0 }) { - toolCalls = new ChatCompletionsFunctionToolCall[toolCallIdsByIndex.Count]; + toolCalls = new ChatToolCall[toolCallIdsByIndex.Count]; int i = 0; foreach (KeyValuePair toolCallIndexAndId in toolCallIdsByIndex) @@ -158,7 +158,7 @@ internal static ChatCompletionsFunctionToolCall[] ConvertToolCallUpdatesToChatCo functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName); functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments); - toolCalls[i] = new ChatCompletionsFunctionToolCall(toolCallIndexAndId.Value, functionName ?? string.Empty, functionArguments?.ToString() ?? string.Empty); + toolCalls[i] = ChatToolCall.CreateFunctionToolCall(toolCallIndexAndId.Value, functionName ?? 
string.Empty, BinaryData.FromString(functionArguments?.ToString() ?? string.Empty)); i++; } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIJsonSchemaTransformer.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIJsonSchemaTransformer.cs new file mode 100644 index 000000000000..73a0fbfb711d --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIJsonSchemaTransformer.cs @@ -0,0 +1,71 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using JsonSchemaMapper; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// JSON Schema transformer to apply OpenAI conditions for structured outputs. +/// +/// - "additionalProperties" property must always be set to in objects. +/// More information here: . +/// +/// +/// - All fields must be "required". +/// More information here: . +/// +/// +internal static class OpenAIJsonSchemaTransformer +{ + private const string AdditionalPropertiesPropertyName = "additionalProperties"; + private const string TypePropertyName = "type"; + private const string ObjectValueName = "object"; + private const string PropertiesPropertyName = "properties"; + private const string RequiredPropertyName = "required"; + + internal static JsonNode Transform(JsonSchemaGenerationContext context, JsonNode schema) + { + // Transform schema if node is object only. + if (schema is JsonObject jsonSchemaObject) + { + var types = GetTypes(jsonSchemaObject); + + if (types is not null && types.Contains(ObjectValueName)) + { + // Set "additionalProperties" to "false". + jsonSchemaObject[AdditionalPropertiesPropertyName] = false; + + // Specify all properties as "required". 
+ if (jsonSchemaObject.TryGetPropertyValue(PropertiesPropertyName, out var properties) && + properties is JsonObject propertiesObject) + { + var propertyNames = propertiesObject.Select(l => (JsonNode)l.Key).ToArray(); + + jsonSchemaObject[RequiredPropertyName] = new JsonArray(propertyNames); + } + } + } + + return schema; + } + + private static List? GetTypes(JsonObject jsonObject) + { + if (jsonObject.TryGetPropertyValue(TypePropertyName, out var typeProperty) && typeProperty is not null) + { + // For cases when "type" has an array value (e.g "type": "["object", "null"]"). + if (typeProperty is JsonArray nodeArray) + { + return nodeArray.ToArray().Select(element => element?.GetValue()).ToList(); + } + + // Case when "type" has a string value (e.g. "type": "object"). + return [typeProperty.GetValue()]; + } + + return null; + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs similarity index 55% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs index fa3845782d0a..3f9c413de5be 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIStreamingChatMessageContent.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/OpenAIStreamingChatMessageContent.cs @@ -1,14 +1,15 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Text; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// Azure OpenAI and OpenAI Specialized streaming chat message content. +/// OpenAI specialized streaming chat message content. /// /// /// Represents a chat message content chunk that was streamed from the remote model. 
@@ -18,31 +19,53 @@ public sealed class OpenAIStreamingChatMessageContent : StreamingChatMessageCont /// /// The reason why the completion finished. /// - public CompletionsFinishReason? FinishReason { get; set; } + public ChatFinishReason? FinishReason { get; set; } /// /// Create a new instance of the class. /// - /// Internal Azure SDK Message update representation + /// Internal OpenAI SDK Message update representation /// Index of the choice /// The model ID used to generate the content /// Additional metadata internal OpenAIStreamingChatMessageContent( - StreamingChatCompletionsUpdate chatUpdate, + StreamingChatCompletionUpdate chatUpdate, int choiceIndex, string modelId, IReadOnlyDictionary? metadata = null) : base( - chatUpdate.Role.HasValue ? new AuthorRole(chatUpdate.Role.Value.ToString()) : null, - chatUpdate.ContentUpdate, + null, + null, chatUpdate, choiceIndex, modelId, Encoding.UTF8, metadata) { - this.ToolCallUpdate = chatUpdate.ToolCallUpdate; - this.FinishReason = chatUpdate?.FinishReason; + try + { + this.FinishReason = chatUpdate.FinishReason; + + if (chatUpdate.Role.HasValue) + { + this.Role = new AuthorRole(chatUpdate.Role.ToString()!); + } + + if (chatUpdate.ToolCallUpdates is not null) + { + this.ToolCallUpdates = chatUpdate.ToolCallUpdates; + } + + if (chatUpdate.ContentUpdate is not null) + { + this.Items = CreateContentItems(chatUpdate.ContentUpdate); + } + } + catch (NullReferenceException) + { + // Temporary workaround for OpenAI SDK Bug here: https://github.com/openai/openai-dotnet/issues/198 + // TODO: Remove this try-catch block once the bug is fixed. 
+ } } /// @@ -50,7 +73,7 @@ internal OpenAIStreamingChatMessageContent( /// /// Author role of the message /// Content of the message - /// Tool call update + /// Tool call updates /// Completion finish reason /// Index of the choice /// The model ID used to generate the content @@ -58,8 +81,8 @@ internal OpenAIStreamingChatMessageContent( internal OpenAIStreamingChatMessageContent( AuthorRole? authorRole, string? content, - StreamingToolCallUpdate? tootToolCallUpdate = null, - CompletionsFinishReason? completionsFinishReason = null, + IReadOnlyList? toolCallUpdates = null, + ChatFinishReason? completionsFinishReason = null, int choiceIndex = 0, string? modelId = null, IReadOnlyDictionary? metadata = null) @@ -72,16 +95,32 @@ internal OpenAIStreamingChatMessageContent( Encoding.UTF8, metadata) { - this.ToolCallUpdate = tootToolCallUpdate; + this.ToolCallUpdates = toolCallUpdates; this.FinishReason = completionsFinishReason; } /// Gets any update information in the message about a tool call. - public StreamingToolCallUpdate? ToolCallUpdate { get; } + public IReadOnlyList? ToolCallUpdates { get; } /// public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString()); /// public override string ToString() => this.Content ?? string.Empty; + + private static StreamingKernelContentItemCollection CreateContentItems(IReadOnlyList contentUpdate) + { + StreamingKernelContentItemCollection collection = []; + + foreach (var content in contentUpdate) + { + // We only support text content for now. 
+ if (content.Kind == ChatMessageContentPartKind.Text) + { + collection.Add(new StreamingTextContent(content.Text)); + } + } + + return collection; + } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs deleted file mode 100644 index 320a7b213bb3..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/CustomClient/OpenAITextToImageClientCore.cs +++ /dev/null @@ -1,114 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Text; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// Base type for OpenAI text to image clients. -internal sealed class OpenAITextToImageClientCore -{ - /// - /// Initializes a new instance of the class. - /// - /// The HttpClient used for making HTTP requests. - /// The to use for logging. If null, no logging will be performed. - internal OpenAITextToImageClientCore(HttpClient? httpClient, ILogger? logger = null) - { - this._httpClient = HttpClientProvider.GetHttpClient(httpClient); - this._logger = logger ?? NullLogger.Instance; - } - - /// - /// Storage for AI service attributes. - /// - internal Dictionary Attributes { get; } = []; - - /// - /// Run the HTTP request to generate a list of images - /// - /// URL for the text to image request API - /// Request payload - /// Function to invoke to extract the desired portion of the text to image response. - /// The to monitor for cancellation requests. The default is . 
- /// List of image URLs - [Experimental("SKEXP0010")] - internal async Task> ExecuteImageGenerationRequestAsync( - string url, - string requestBody, - Func extractResponseFunc, - CancellationToken cancellationToken = default) - { - var result = await this.ExecutePostRequestAsync(url, requestBody, cancellationToken).ConfigureAwait(false); - return result.Images.Select(extractResponseFunc).ToList(); - } - - /// - /// Add attribute to the internal attribute dictionary if the value is not null or empty. - /// - /// Attribute key - /// Attribute value - internal void AddAttribute(string key, string? value) - { - if (!string.IsNullOrEmpty(value)) - { - this.Attributes.Add(key, value); - } - } - - /// - /// Logger - /// - private readonly ILogger _logger; - - /// - /// The HttpClient used for making HTTP requests. - /// - private readonly HttpClient _httpClient; - - internal async Task ExecutePostRequestAsync(string url, string requestBody, CancellationToken cancellationToken = default) - { - using var content = new StringContent(requestBody, Encoding.UTF8, "application/json"); - using var response = await this.ExecuteRequestAsync(url, HttpMethod.Post, content, cancellationToken).ConfigureAwait(false); - string responseJson = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - T result = JsonSerializer.Deserialize(responseJson, JsonOptionsCache.ReadPermissive) ?? throw new KernelException("Response JSON parse error"); - return result; - } - - internal event EventHandler? RequestCreated; - - internal async Task ExecuteRequestAsync(string url, HttpMethod method, HttpContent? 
content, CancellationToken cancellationToken = default) - { - using var request = new HttpRequestMessage(method, url); - - if (content is not null) - { - request.Content = content; - } - - request.Headers.Add("User-Agent", HttpHeaderConstant.Values.UserAgent); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAITextToImageClientCore))); - - this.RequestCreated?.Invoke(this, request); - - var response = await this._httpClient.SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - - if (this._logger.IsEnabled(LogLevel.Debug)) - { - this._logger.LogDebug("HTTP response: {0} {1}", (int)response.StatusCode, response.StatusCode.ToString("G")); - } - - return response; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs similarity index 90% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs index b4466a30af90..47697609aebc 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ChatHistoryExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/ChatHistoryExtensions.cs @@ -12,7 +12,7 @@ namespace Microsoft.SemanticKernel; /// /// Chat history extensions. 
/// -public static class ChatHistoryExtensions +public static class OpenAIChatHistoryExtensions { /// /// Add a message to the chat history at the end of the streamed message @@ -43,7 +43,7 @@ public static async IAsyncEnumerable AddStreamingMe (contentBuilder ??= new()).Append(contentUpdate); } - OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + OpenAIFunctionToolCall.TrackStreamingToolingUpdate(chatMessage.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); // Is always expected to have at least one chunk with the role provided from a streaming message streamedRole ??= chatMessage.Role; @@ -62,7 +62,7 @@ public static async IAsyncEnumerable AddStreamingMe role, contentBuilder?.ToString() ?? string.Empty, messageContents[0].ModelId!, - OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex), + OpenAIFunctionToolCall.ConvertToolCallUpdatesToFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex), metadata) { AuthorName = streamedName }); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs new file mode 100644 index 000000000000..c322ead2b671 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs @@ -0,0 +1,371 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; + +#pragma warning disable IDE0039 // Use local function + +namespace Microsoft.SemanticKernel; + +/// +/// Sponsor extensions class for . +/// +public static class OpenAIKernelBuilderExtensions +{ + #region Text Embedding + /// + /// Adds to the . + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null, + int? 
dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextEmbeddingGeneration( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? serviceProvider.GetRequiredService(), + serviceProvider.GetService(), + dimensions)); + + return builder; + } + #endregion + + #region Text to Image + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextToImage( + this IKernelBuilder builder, + string apiKey, + string? orgId = null, + string? modelId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + modelId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + #endregion + + #region Text to Audio + + /// + /// Adds the OpenAI text-to-audio service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAITextToAudio( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToAudioService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + #endregion + + #region Audio-to-Text + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIAudioToText( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIAudioToText( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, factory); + + return builder; + } + + #endregion + + #region Files + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. 
+ /// The same instance as . + [Experimental("SKEXP0010")] + [Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations.")] + [ExcludeFromCodeCoverage] + public static IKernelBuilder AddOpenAIFiles( + this IKernelBuilder builder, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(apiKey); + + builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAIFileService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService())); + + return builder; + } + + #endregion + + #region Chat Completion + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. 
+ /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + /// + /// Adds the Custom Endpoint OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// Custom OpenAI Compatible Message API endpoint + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The HttpClient to use with this service. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IKernelBuilder AddOpenAIChatCompletion( + this IKernelBuilder builder, + string modelId, + Uri endpoint, + string? apiKey, + string? orgId = null, + string? serviceId = null, + HttpClient? httpClient = null) + { + Verify.NotNull(builder); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? 
_) => + new(modelId: modelId, + apiKey: apiKey, + endpoint: endpoint, + organization: orgId, + httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), + loggerFactory: serviceProvider.GetService()); + + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + builder.Services.AddKeyedSingleton(serviceId, (Func)Factory); + + return builder; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs similarity index 98% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs index 6859e1225dd6..a0982942b222 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIKernelFunctionMetadataExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelFunctionMetadataExtensions.cs @@ -10,7 +10,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; public static class OpenAIKernelFunctionMetadataExtensions { /// - /// Convert a to an . + /// Convert a to an . /// /// The object to convert. /// An object. diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs new file mode 100644 index 000000000000..0ac425a15593 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIMemoryBuilderExtensions.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.Memory; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Provides extension methods for the class to configure OpenAI connector. 
+/// +public static class OpenAIMemoryBuilderExtensions +{ + /// + /// Adds the OpenAI text embeddings service. + /// See https://platform.openai.com/docs for service details. + /// + /// The instance + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// Custom for HTTP requests. + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// Self instance + [Experimental("SKEXP0010")] + public static MemoryBuilder WithOpenAITextEmbeddingGeneration( + this MemoryBuilder builder, + string modelId, + string apiKey, + string? orgId = null, + HttpClient? httpClient = null, + int? dimensions = null) + { + return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), + loggerFactory, + dimensions)); + } +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs similarity index 97% rename from dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs index 135b17b83df3..91da7138f9e4 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/OpenAIPluginCollectionExtensions.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIPluginCollectionExtensions.cs @@ -1,7 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System.Diagnostics.CodeAnalysis; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -20,7 +20,7 @@ public static class OpenAIPluginCollectionExtensions /// if the function was found; otherwise, . public static bool TryGetFunctionAndArguments( this IReadOnlyKernelPluginCollection plugins, - ChatCompletionsFunctionToolCall functionToolCall, + ChatToolCall functionToolCall, [NotNullWhen(true)] out KernelFunction? function, out KernelArguments? arguments) => plugins.TryGetFunctionAndArguments(new OpenAIFunctionToolCall(functionToolCall), out function, out arguments); diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs new file mode 100644 index 000000000000..ed191d3dda0f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.cs @@ -0,0 +1,345 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.Embeddings; +using Microsoft.SemanticKernel.Http; +using Microsoft.SemanticKernel.TextGeneration; +using Microsoft.SemanticKernel.TextToAudio; +using Microsoft.SemanticKernel.TextToImage; +using OpenAI; + +namespace Microsoft.SemanticKernel; + +#pragma warning disable IDE0039 // Use local function + +/// +/// Sponsor extensions class for . +/// +public static class OpenAIServiceCollectionExtensions +{ + #region Text Embedding + /// + /// Adds the to the . + /// + /// The instance to augment. 
+ /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService(), + dimensions)); + } + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// The OpenAI model id. + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null, + int? dimensions = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextEmbeddingGenerationService( + modelId, + openAIClient ?? 
serviceProvider.GetRequiredService(), + serviceProvider.GetService(), + dimensions)); + } + #endregion + + #region Text to Image + /// + /// Add the OpenAI Dall-E text to image service to the list + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services, + string apiKey, + string? orgId = null, + string? modelId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToImageService( + apiKey, + orgId, + modelId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + #endregion + + #region Text to Audio + + /// + /// Adds the OpenAI text-to-audio service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAITextToAudio( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAITextToAudioService( + modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + } + + #endregion + + #region Audio-to-Text + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIAudioToText( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + Func factory = (serviceProvider, _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + + /// + /// Adds the OpenAI audio-to-text service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIAudioToText( + this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + Func factory = (serviceProvider, _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, factory); + + return services; + } + #endregion + + #region Files + + /// + /// Adds the to the . + /// + /// The instance to augment. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + [Experimental("SKEXP0010")] + [Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations.")] + [ExcludeFromCodeCoverage] + public static IServiceCollection AddOpenAIFiles( + this IServiceCollection services, + string apiKey, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(apiKey); + + services.AddKeyedSingleton(serviceId, (serviceProvider, _) => + new OpenAIFileService( + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService())); + + return services; + } + + #endregion + + #region Chat Completion + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion( + this IServiceCollection services, + string modelId, + string apiKey, + string? orgId = null, + string? 
serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + Verify.NotNullOrWhiteSpace(apiKey); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model id + /// to use for the service. If null, one must be available in the service provider when this service is resolved. + /// A local identifier for the given AI service + /// The same instance as . + public static IServiceCollection AddOpenAIChatCompletion(this IServiceCollection services, + string modelId, + OpenAIClient? openAIClient = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + /// + /// Adds the Custom OpenAI chat completion service to the list. + /// + /// The instance to augment. + /// OpenAI model name, see https://platform.openai.com/docs/models + /// A Custom Message API compatible endpoint. + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. + /// A local identifier for the given AI service + /// The same instance as . 
+ [Experimental("SKEXP0010")] + public static IServiceCollection AddOpenAIChatCompletion( + this IServiceCollection services, + string modelId, + Uri endpoint, + string? apiKey = null, + string? orgId = null, + string? serviceId = null) + { + Verify.NotNull(services); + Verify.NotNullOrWhiteSpace(modelId); + + OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _) => + new(modelId, + endpoint, + apiKey, + orgId, + HttpClientProvider.GetHttpClient(serviceProvider), + serviceProvider.GetService()); + + services.AddKeyedSingleton(serviceId, (Func)Factory); + services.AddKeyedSingleton(serviceId, (Func)Factory); + + return services; + } + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs similarity index 95% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs index 8d87720fa89f..523b84dbe333 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFilePurpose.cs @@ -10,6 +10,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// https://platform.openai.com/docs/api-reference/files/object#files/object-purpose /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public readonly struct OpenAIFilePurpose : IEquatable { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs similarity index 84% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs index 371be0d93a33..e50a9185c20c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileReference.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Models/OpenAIFileReference.cs @@ -9,6 +9,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// References an uploaded file by id. /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileReference { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs deleted file mode 100644 index 2a3d2ce7dd61..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIMemoryBuilderExtensions.cs +++ /dev/null @@ -1,111 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using Azure.Core; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.Memory; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Provides extension methods for the class to configure OpenAI and AzureOpenAI connectors. -/// -public static class OpenAIMemoryBuilderExtensions -{ - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. 
- /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Model identifier - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), - loggerFactory, - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service. - /// See https://learn.microsoft.com/azure/cognitive-services/openai for service details. - /// - /// The instance - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. 
- /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithAzureOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(httpClient ?? builderHttpClient), - loggerFactory, - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service. - /// See https://platform.openai.com/docs for service details. - /// - /// The instance - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// Custom for HTTP requests. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// Self instance - [Experimental("SKEXP0010")] - public static MemoryBuilder WithOpenAITextEmbeddingGeneration( - this MemoryBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - return builder.WithTextEmbeddingGeneration((loggerFactory, builderHttpClient) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient ?? 
builderHttpClient), - loggerFactory, - dimensions)); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs deleted file mode 100644 index 80cc60944965..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs +++ /dev/null @@ -1,2042 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Http; -using Microsoft.SemanticKernel.TextGeneration; -using Microsoft.SemanticKernel.TextToAudio; -using Microsoft.SemanticKernel.TextToImage; - -#pragma warning disable CA2000 // Dispose objects before losing scope -#pragma warning disable IDE0039 // Use local function - -namespace Microsoft.SemanticKernel; - -/// -/// Provides extension methods for and related classes to configure OpenAI and Azure OpenAI connectors. -/// -public static class OpenAIServiceCollectionExtensions -{ - #region Text Completion - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), httpClient ?? serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . 
- public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, new AzureKeyCredential(apiKey), serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, credentials, httpClient ?? 
serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - { - var client = CreateAzureOpenAIClient(endpoint, credentials, serviceProvider.GetService()); - return new AzureOpenAITextGenerationService(deploymentName, client, modelId, serviceProvider.GetService()); - }); - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IKernelBuilder AddAzureOpenAITextGeneration( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an Azure OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAITextGeneration( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddOpenAITextGeneration( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAITextGeneration( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? 
serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IKernelBuilder AddOpenAITextGeneration( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - openAIClient ?? serviceProvider.GetRequiredService(), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds an OpenAI text generation service with the specified configuration. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAITextGeneration(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextGenerationService( - modelId, - openAIClient ?? 
serviceProvider.GetRequiredService(), - serviceProvider.GetService())); - } - - #endregion - - #region Text Embedding - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credential); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credential, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credential); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - endpoint, - credential, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - openAIClient ?? 
serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds an Azure OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextEmbeddingGenerationService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. 
- /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextEmbeddingGeneration( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - dimensions)); - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextEmbeddingGeneration( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - openAIClient ?? serviceProvider.GetRequiredService(), - serviceProvider.GetService(), - dimensions)); - - return builder; - } - - /// - /// Adds the OpenAI text embeddings service to the list. - /// - /// The instance to augment. - /// The OpenAI model id. - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The number of dimensions the resulting output embeddings should have. Only supported in "text-embedding-3" and later models. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextEmbeddingGeneration(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null, - int? dimensions = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextEmbeddingGenerationService( - modelId, - openAIClient ?? 
serviceProvider.GetRequiredService(), - serviceProvider.GetService(), - dimensions)); - } - - #endregion - - #region Chat Completion - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . 
- public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? 
modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(serviceProvider)); - - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. 
If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI chat completion with data service to the list. - /// - /// The instance. - /// Required configuration for Azure OpenAI chat completion with data. - /// A local identifier for the given AI service. - /// The same instance as . - /// - /// More information: - /// - [Experimental("SKEXP0010")] - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static IKernelBuilder AddAzureOpenAIChatCompletion( - this IKernelBuilder builder, - AzureOpenAIChatCompletionWithDataConfig config, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNull(config); - - Func factory = (serviceProvider, _) => - new(config, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI chat completion with data service to the list. - /// - /// The instance. - /// Required configuration for Azure OpenAI chat completion with data. 
- /// A local identifier for the given AI service. - /// The same instance as . - /// - /// More information: - /// - [Experimental("SKEXP0010")] - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static IServiceCollection AddAzureOpenAIChatCompletion( - this IServiceCollection services, - AzureOpenAIChatCompletionWithDataConfig config, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNull(config); - - Func factory = (serviceProvider, _) => - new(config, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAIChatCompletion( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// The same instance as . - public static IServiceCollection AddOpenAIChatCompletion(this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Custom OpenAI chat completion service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// A Custom Message API compatible endpoint. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAIChatCompletion( - this IServiceCollection services, - string modelId, - Uri endpoint, - string? apiKey = null, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, - endpoint, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Custom Endpoint OpenAI chat completion service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// Custom OpenAI Compatible Message API endpoint - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAIChatCompletion( - this IKernelBuilder builder, - string modelId, - Uri endpoint, - string? apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId: modelId, - apiKey: apiKey, - endpoint: endpoint, - organization: orgId, - httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - loggerFactory: serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - #endregion - - #region Images - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? modelId = null, - string? serviceId = null, - string? 
apiVersion = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - credentials, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - apiVersion)); - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? modelId = null, - string? serviceId = null, - string? apiVersion = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - credentials, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService(), - apiVersion)); - - return builder; - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// Model identifier - /// A local identifier for the given AI service - /// Azure OpenAI API version - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - string? serviceId = null, - string? apiVersion = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService(), - apiVersion)); - - return builder; - } - - /// - /// Add the Azure OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// Maximum number of attempts to retrieve the text to image operation result. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - int maxRetryCount = 5) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAITextToImage( - this IKernelBuilder builder, - string apiKey, - string? orgId = null, - string? modelId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToImageService( - apiKey, - orgId, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services, - string apiKey, - string? orgId = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToImageService( - apiKey, - orgId, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// to use for the service. 
If null, one must be available in the service provider when this service is resolved. - /// Model identifier - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAITextToImage( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - } - - /// - /// Add the OpenAI Dall-E text to image service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// Model identifier - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAITextToImage( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? modelId = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToImageService( - deploymentName, - openAIClient ?? serviceProvider.GetRequiredService(), - modelId, - serviceProvider.GetService())); - - return builder; - } - - #endregion - - #region Files - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. 
- /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0010")] - public static IKernelBuilder AddOpenAIFiles( - this IKernelBuilder builder, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddOpenAIFiles( - this IServiceCollection services, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - - return services; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment URL - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The API version to target. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0010")] - public static IKernelBuilder AddAzureOpenAIFiles( - this IKernelBuilder builder, - string endpoint, - string apiKey, - string? orgId = null, - string? version = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - new Uri(endpoint), - apiKey, - orgId, - version, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Add the OpenAI file service to the list - /// - /// The instance to augment. - /// Azure OpenAI deployment URL - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The API version to target. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0010")] - public static IServiceCollection AddAzureOpenAIFiles( - this IServiceCollection services, - string endpoint, - string apiKey, - string? orgId = null, - string? version = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(apiKey); - - services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAIFileService( - new Uri(endpoint), - apiKey, - orgId, - version, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - - return services; - } - - #endregion - - #region Text-to-Audio - - /// - /// Adds the Azure OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAITextToAudio( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToAudioService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds the Azure OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// A local identifier for the given AI service - /// Model identifier - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAITextToAudio( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new AzureOpenAITextToAudioService( - deploymentName, - endpoint, - apiKey, - modelId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - /// - /// Adds the OpenAI text-to-audio service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAITextToAudio( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToAudioService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService())); - - return builder; - } - - /// - /// Adds the OpenAI text-to-audio service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAITextToAudio( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? 
serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - return services.AddKeyedSingleton(serviceId, (serviceProvider, _) => - new OpenAITextToAudioService( - modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService())); - } - - #endregion - - #region Audio-to-Text - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. 
- /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - string endpoint, - string apiKey, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - new AzureKeyCredential(apiKey), - HttpClientProvider.GetHttpClient(serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The HttpClient to use with this service. - /// The same instance as . 
- [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? modelId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - string endpoint, - TokenCredential credentials, - string? serviceId = null, - string? 
modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - Verify.NotNullOrWhiteSpace(endpoint); - Verify.NotNull(credentials); - - Func factory = (serviceProvider, _) => - { - OpenAIClient client = CreateAzureOpenAIClient( - endpoint, - credentials, - HttpClientProvider.GetHttpClient(serviceProvider)); - return new(deploymentName, client, modelId, serviceProvider.GetService()); - }; - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddAzureOpenAIAudioToText( - this IKernelBuilder builder, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the Azure OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// Model identifier, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddAzureOpenAIAudioToText( - this IServiceCollection services, - string deploymentName, - OpenAIClient? openAIClient = null, - string? serviceId = null, - string? modelId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(deploymentName); - - Func factory = (serviceProvider, _) => - new(deploymentName, openAIClient ?? serviceProvider.GetRequiredService(), modelId, serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The HttpClient to use with this service. - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAIAudioToText( - this IKernelBuilder builder, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null, - HttpClient? httpClient = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(httpClient, serviceProvider), - serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. 
- /// OpenAI model name, see https://platform.openai.com/docs/models - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAIAudioToText( - this IServiceCollection services, - string modelId, - string apiKey, - string? orgId = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - Verify.NotNullOrWhiteSpace(apiKey); - - Func factory = (serviceProvider, _) => - new(modelId, - apiKey, - orgId, - HttpClientProvider.GetHttpClient(serviceProvider), - serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. - /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IKernelBuilder AddOpenAIAudioToText( - this IKernelBuilder builder, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(builder); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - builder.Services.AddKeyedSingleton(serviceId, factory); - - return builder; - } - - /// - /// Adds the OpenAI audio-to-text service to the list. - /// - /// The instance to augment. - /// OpenAI model id - /// to use for the service. If null, one must be available in the service provider when this service is resolved. 
- /// A local identifier for the given AI service - /// The same instance as . - [Experimental("SKEXP0001")] - public static IServiceCollection AddOpenAIAudioToText( - this IServiceCollection services, - string modelId, - OpenAIClient? openAIClient = null, - string? serviceId = null) - { - Verify.NotNull(services); - Verify.NotNullOrWhiteSpace(modelId); - - Func factory = (serviceProvider, _) => - new(modelId, openAIClient ?? serviceProvider.GetRequiredService(), serviceProvider.GetService()); - - services.AddKeyedSingleton(serviceId, factory); - - return services; - } - - #endregion - - private static OpenAIClient CreateAzureOpenAIClient(string endpoint, AzureKeyCredential credentials, HttpClient? httpClient) => - new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); - - private static OpenAIClient CreateAzureOpenAIClient(string endpoint, TokenCredential credentials, HttpClient? httpClient) => - new(new Uri(endpoint), credentials, ClientCore.GetOpenAIClientOptions(httpClient)); -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs similarity index 62% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs index 3bebb4867af8..331da48cc08c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIAudioToTextService.cs @@ -5,27 +5,28 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.Services; +using OpenAI; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// OpenAI audio-to-text service. +/// OpenAI text-to-audio service. 
/// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAIAudioToTextService : IAudioToTextService { - /// Core implementation shared by OpenAI services. - private readonly OpenAIClientCore _core; + /// + /// OpenAI text-to-audio client for HTTP operations. + /// + private readonly ClientCore _client; /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -39,19 +40,12 @@ public OpenAIAudioToTextService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - this._core = new( - modelId: modelId, - apiKey: apiKey, - organization: organization, - httpClient: httpClient, - logger: loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); } /// - /// Creates an instance of the using the specified . + /// Initializes a new instance of the class. /// /// Model name /// Custom for HTTP requests. @@ -61,9 +55,8 @@ public OpenAIAudioToTextService( OpenAIClient openAIClient, ILoggerFactory? loggerFactory = null) { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAIAudioToTextService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); } /// @@ -72,5 +65,5 @@ public Task> GetTextContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetTextContentFromAudioAsync(content, executionSettings, cancellationToken); + => this._client.GetTextFromAudioContentsAsync(this._client.ModelId, content, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs similarity index 61% rename from dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs index a9f617efed73..a3f8d96d6e51 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs @@ -6,11 +6,13 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Services; using Microsoft.SemanticKernel.TextGeneration; +using OpenAI; + +#pragma warning disable CA1862 // Use the 'StringComparison' method overloads to perform case-insensitive string comparisons +#pragma warning disable RCS1155 // Use StringComparison when comparing strings namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -19,7 +21,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// public sealed class OpenAIChatCompletionService : IChatCompletionService, ITextGenerationService { - private readonly OpenAIClientCore _core; + /// Core implementation shared by OpenAI clients. + private readonly ClientCore _client; /// /// Create an instance of the OpenAI chat completion connector @@ -37,16 +40,13 @@ public OpenAIChatCompletionService( ILoggerFactory? 
loggerFactory = null ) { - this._core = new( + this._client = new( modelId, apiKey, - endpoint: null, organization, + endpoint: null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); } /// @@ -67,30 +67,13 @@ public OpenAIChatCompletionService( HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null) { - Uri? internalClientEndpoint = null; - var providedEndpoint = endpoint ?? httpClient?.BaseAddress; - if (providedEndpoint is not null) - { - // If the provided endpoint does not have a path specified, updates it to the default Message API Chat Completions endpoint - internalClientEndpoint = providedEndpoint.PathAndQuery == "/" ? - new Uri(providedEndpoint, "v1/chat/completions") - : providedEndpoint; - } - - this._core = new( + this._client = new( modelId, apiKey, - internalClientEndpoint, organization, + endpoint ?? httpClient?.BaseAddress, httpClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - if (providedEndpoint is not null) - { - this._core.AddAttribute(AIServiceExtensions.EndpointKey, providedEndpoint.ToString()); - } - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); } /// @@ -104,30 +87,44 @@ public OpenAIChatCompletionService( OpenAIClient openAIClient, ILoggerFactory? loggerFactory = null) { - this._core = new( + this._client = new( modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAIChatCompletionService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// - public Task> GetChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + public Task> GetChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatMessageContentsAsync(this._client.ModelId, chatHistory, executionSettings, kernel, cancellationToken); /// - public IAsyncEnumerable GetStreamingChatMessageContentsAsync(ChatHistory chatHistory, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken); + public IAsyncEnumerable GetStreamingChatMessageContentsAsync( + ChatHistory chatHistory, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetStreamingChatMessageContentsAsync(this._client.ModelId, chatHistory, executionSettings, kernel, cancellationToken); /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); + public Task> GetTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatAsTextContentsAsync(this._client.ModelId, prompt, executionSettings, kernel, cancellationToken); /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - => this._core.GetChatAsTextStreamingContentsAsync(prompt, executionSettings, kernel, cancellationToken); + public IAsyncEnumerable GetStreamingTextContentsAsync( + string prompt, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetChatAsTextStreamingContentsAsync(this._client.ModelId, prompt, executionSettings, kernel, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs similarity index 97% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs index 690954448eea..2b7f1bde31d8 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIFileService.cs @@ -20,8 +20,11 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// File service access for OpenAI: https://api.openai.com/v1/files /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileService { + private const string OrganizationKey = "Organization"; private const string HeaderNameAuthorization = "Authorization"; private const string HeaderNameAzureApiKey = "api-key"; private const string HeaderNameOpenAIAssistant = "OpenAI-Beta"; @@ -284,7 +287,7 @@ private void AddRequestHeaders(HttpRequestMessage request) if (!string.IsNullOrEmpty(this._organization)) { - this._httpClient.DefaultRequestHeaders.Add(OpenAIClientCore.OrganizationKey, this._organization); + this._httpClient.DefaultRequestHeaders.Add(OrganizationKey, this._organization); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs index c940a7caf291..aa70819020d0 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextEmbbedingGenerationService.cs @@ -6,24 +6,23 @@ using System.Net.Http; using System.Threading; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.Services; +using OpenAI; namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// -/// OpenAI text embedding service. +/// OpenAI implementation of /// [Experimental("SKEXP0010")] public sealed class OpenAITextEmbeddingGenerationService : ITextEmbeddingGenerationService { - private readonly OpenAIClientCore _core; + private readonly ClientCore _client; private readonly int? 
_dimensions; /// - /// Create an instance of the OpenAI text embedding connector + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -39,20 +38,20 @@ public OpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new( + Verify.NotNullOrWhiteSpace(modelId); + this._client = new( modelId: modelId, apiKey: apiKey, - organization: organization, + endpoint: null, + organizationId: organization, httpClient: httpClient, logger: loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._dimensions = dimensions; } /// - /// Create an instance of the OpenAI text embedding connector + /// Initializes a new instance of the class. /// /// Model name /// Custom for HTTP requests. @@ -64,14 +63,13 @@ public OpenAITextEmbeddingGenerationService( ILoggerFactory? loggerFactory = null, int? dimensions = null) { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - + Verify.NotNullOrWhiteSpace(modelId); + this._client = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextEmbeddingGenerationService))); this._dimensions = dimensions; } /// - public IReadOnlyDictionary Attributes => this._core.Attributes; + public IReadOnlyDictionary Attributes => this._client.Attributes; /// public Task>> GenerateEmbeddingsAsync( @@ -79,7 +77,7 @@ public Task>> GenerateEmbeddingsAsync( Kernel? 
kernel = null, CancellationToken cancellationToken = default) { - this._core.LogActionDetails(); - return this._core.GetEmbeddingsAsync(data, kernel, this._dimensions, cancellationToken); + this._client.LogActionDetails(); + return this._client.GetEmbeddingsAsync(this._client.ModelId, data, kernel, this._dimensions, cancellationToken); } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs similarity index 77% rename from dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs index 177acf539a41..93b5ede244fb 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioService.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToAudioService.cs @@ -14,13 +14,13 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// /// OpenAI text-to-audio service. /// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAITextToAudioService : ITextToAudioService { /// /// OpenAI text-to-audio client for HTTP operations. /// - private readonly OpenAITextToAudioClient _client; + private readonly ClientCore _client; /// /// Gets the attribute name used to store the organization in the dictionary. @@ -31,7 +31,7 @@ public sealed class OpenAITextToAudioService : ITextToAudioService public IReadOnlyDictionary Attributes => this._client.Attributes; /// - /// Creates an instance of the with API key auth. + /// Initializes a new instance of the class. /// /// Model name /// OpenAI API Key @@ -45,10 +45,8 @@ public OpenAITextToAudioService( HttpClient? httpClient = null, ILoggerFactory? 
loggerFactory = null) { - this._client = new(modelId, apiKey, organization, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); - - this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._client.AddAttribute(OrganizationKey, organization); + Verify.NotNullOrWhiteSpace(modelId, nameof(modelId)); + this._client = new(modelId, apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(typeof(OpenAITextToAudioService))); } /// @@ -57,5 +55,5 @@ public Task> GetAudioContentsAsync( PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - => this._client.GetAudioContentsAsync(text, executionSettings, cancellationToken); + => this._client.GetAudioContentsAsync(this._client.ModelId, text, executionSettings, cancellationToken); } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs new file mode 100644 index 000000000000..4967d87228ff --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAITextToImageService.cs @@ -0,0 +1,49 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel.TextToImage; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// OpenAI text to image service. +/// +[Experimental("SKEXP0010")] +public class OpenAITextToImageService : ITextToImageService +{ + private readonly ClientCore _client; + + /// + public IReadOnlyDictionary Attributes => this._client.Attributes; + + /// + /// Initializes a new instance of the class. + /// + /// OpenAI API key, see https://platform.openai.com/account/api-keys + /// OpenAI organization id. 
This is usually optional unless your account belongs to multiple organizations. + /// The model to use for image generation. + /// Custom for HTTP requests. + /// The to use for logging. If null, no logging will be performed. + public OpenAITextToImageService( + string apiKey, + string? organization = null, + string? modelId = null, + HttpClient? httpClient = null, + ILoggerFactory? loggerFactory = null) + { + this._client = new(modelId ?? "dall-e-2", apiKey, organization, null, httpClient, loggerFactory?.CreateLogger(this.GetType())); + } + + /// + public Task> GetImageContentsAsync( + TextContent input, + PromptExecutionSettings? executionSettings = null, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + => this._client.GetImageContentsAsync(this._client.ModelId, input, executionSettings, kernel, cancellationToken); +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs similarity index 89% rename from dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs index ef7f5e54f7df..441d29c80607 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/AudioToText/OpenAIAudioToTextExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIAudioToTextExecutionSettings.cs @@ -1,6 +1,5 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Text.Json; @@ -12,7 +11,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// /// Execution settings for OpenAI audio-to-text request. 
/// -[Experimental("SKEXP0001")] +[Experimental("SKEXP0010")] public sealed class OpenAIAudioToTextExecutionSettings : PromptExecutionSettings { /// @@ -35,6 +34,7 @@ public string Filename /// An optional language of the audio data as two-letter ISO-639-1 language code (e.g. 'en' or 'es'). /// [JsonPropertyName("language")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Language { get => this._language; @@ -50,6 +50,7 @@ public string? Language /// An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language. /// [JsonPropertyName("prompt")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? Prompt { get => this._prompt; @@ -62,10 +63,11 @@ public string? Prompt } /// - /// The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. Default is 'json'. + /// The format of the transcript output, in one of these options: json, srt, verbose_json, or vtt. Default is 'json'. /// [JsonPropertyName("response_format")] - public string ResponseFormat + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ResponseFormat { get => this._responseFormat; @@ -83,7 +85,8 @@ public string ResponseFormat /// Default is 0. /// [JsonPropertyName("temperature")] - public float Temperature + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? 
Temperature { get => this._temperature; @@ -146,20 +149,15 @@ public override PromptExecutionSettings Clone() var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAIAudioToTextExecutionSettings)}", nameof(executionSettings)); + return openAIExecutionSettings!; } #region private ================================================================================ private const string DefaultFilename = "file.mp3"; - private float _temperature = 0; - private string _responseFormat = "json"; + private float? _temperature = 0; + private string? _responseFormat; private string _filename; private string? _language; private string? _prompt; diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs similarity index 85% rename from dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs index 42011da487f0..9412ea745fa3 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileUploadExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIFileUploadExecutionSettings.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System; using System.Diagnostics.CodeAnalysis; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -8,6 +9,8 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// Execution serttings associated with Open AI file upload . /// [Experimental("SKEXP0010")] +[Obsolete("Use OpenAI SDK or AzureOpenAI SDK clients for file operations. 
This class is deprecated and will be removed in a future version.")] +[ExcludeFromCodeCoverage] public sealed class OpenAIFileUploadExecutionSettings { /// diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs similarity index 76% rename from dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index 36796c62f7b9..e6a86b5f1ba2 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -6,9 +6,9 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; -using Azure.AI.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Text; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -16,7 +16,7 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI; /// Execution settings for an OpenAI completion request. /// [JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] -public sealed class OpenAIPromptExecutionSettings : PromptExecutionSettings +public class OpenAIPromptExecutionSettings : PromptExecutionSettings { /// /// Temperature controls the randomness of the completion. @@ -24,7 +24,8 @@ public sealed class OpenAIPromptExecutionSettings : PromptExecutionSettings /// Default is 1.0. /// [JsonPropertyName("temperature")] - public double Temperature + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? Temperature { get => this._temperature; @@ -41,7 +42,8 @@ public double Temperature /// Default is 1.0. /// [JsonPropertyName("top_p")] - public double TopP + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? 
TopP { get => this._topP; @@ -58,7 +60,8 @@ public double TopP /// model's likelihood to talk about new topics. /// [JsonPropertyName("presence_penalty")] - public double PresencePenalty + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? PresencePenalty { get => this._presencePenalty; @@ -75,7 +78,8 @@ public double PresencePenalty /// the model's likelihood to repeat the same line verbatim. /// [JsonPropertyName("frequency_penalty")] - public double FrequencyPenalty + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? FrequencyPenalty { get => this._frequencyPenalty; @@ -90,6 +94,7 @@ public double FrequencyPenalty /// The maximum number of tokens to generate in the completion. /// [JsonPropertyName("max_tokens")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public int? MaxTokens { get => this._maxTokens; @@ -105,6 +110,7 @@ public int? MaxTokens /// Sequences where the completion will stop generating further tokens. /// [JsonPropertyName("stop_sequences")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public IList? StopSequences { get => this._stopSequences; @@ -116,28 +122,12 @@ public IList? StopSequences } } - /// - /// How many completions to generate for each prompt. Default is 1. - /// Note: Because this parameter generates many completions, it can quickly consume your token quota. - /// Use carefully and ensure that you have reasonable settings for max_tokens and stop. - /// - [JsonPropertyName("results_per_prompt")] - public int ResultsPerPrompt - { - get => this._resultsPerPrompt; - - set - { - this.ThrowIfFrozen(); - this._resultsPerPrompt = value; - } - } - /// /// If specified, the system will make a best effort to sample deterministically such that repeated requests with the /// same seed and parameters should return the same result. Determinism is not guaranteed. 
/// [JsonPropertyName("seed")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public long? Seed { get => this._seed; @@ -153,10 +143,14 @@ public long? Seed /// Gets or sets the response format to use for the completion. /// /// - /// Possible values are: "json_object", "text", object. + /// Possible values are: + /// - values: "json_object", "text"; + /// - object; + /// - object, which will be used to automatically create a JSON schema. /// [Experimental("SKEXP0010")] [JsonPropertyName("response_format")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public object? ResponseFormat { get => this._responseFormat; @@ -173,6 +167,7 @@ public object? ResponseFormat /// Defaults to "Assistant is a large language model." /// [JsonPropertyName("chat_system_prompt")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? ChatSystemPrompt { get => this._chatSystemPrompt; @@ -188,6 +183,7 @@ public string? ChatSystemPrompt /// Modify the likelihood of specified tokens appearing in the completion. /// [JsonPropertyName("token_selection_biases")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public IDictionary? TokenSelectionBiases { get => this._tokenSelectionBiases; @@ -260,6 +256,7 @@ public string? User /// [Experimental("SKEXP0010")] [JsonPropertyName("logprobs")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public bool? Logprobs { get => this._logprobs; @@ -276,6 +273,7 @@ public bool? Logprobs /// [Experimental("SKEXP0010")] [JsonPropertyName("top_logprobs")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public int? TopLogprobs { get => this._topLogprobs; @@ -287,23 +285,6 @@ public int? TopLogprobs } } - /// - /// An abstraction of additional settings for chat completion, see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.azurechatextensionsoptions. - /// This property is compatible only with Azure OpenAI. 
- /// - [Experimental("SKEXP0010")] - [JsonIgnore] - public AzureChatExtensionsOptions? AzureChatExtensionsOptions - { - get => this._azureChatExtensionsOptions; - - set - { - this.ThrowIfFrozen(); - this._azureChatExtensionsOptions = value; - } - } - /// public override void Freeze() { @@ -328,34 +309,9 @@ public override void Freeze() /// public override PromptExecutionSettings Clone() { - return new OpenAIPromptExecutionSettings() - { - ModelId = this.ModelId, - ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, - Temperature = this.Temperature, - TopP = this.TopP, - PresencePenalty = this.PresencePenalty, - FrequencyPenalty = this.FrequencyPenalty, - MaxTokens = this.MaxTokens, - StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, - ResultsPerPrompt = this.ResultsPerPrompt, - Seed = this.Seed, - ResponseFormat = this.ResponseFormat, - TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary(this.TokenSelectionBiases) : null, - ToolCallBehavior = this.ToolCallBehavior, - User = this.User, - ChatSystemPrompt = this.ChatSystemPrompt, - Logprobs = this.Logprobs, - TopLogprobs = this.TopLogprobs, - AzureChatExtensionsOptions = this.AzureChatExtensionsOptions, - }; + return this.Clone(); } - /// - /// Default max tokens for a text generation - /// - internal static int DefaultTextMaxTokens { get; } = 256; - /// /// Create a new settings object with the values from another settings object. 
/// @@ -380,44 +336,50 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio var json = JsonSerializer.Serialize(executionSettings); var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAIPromptExecutionSettings)}", nameof(executionSettings)); + // Restore the function choice behavior that lost internal state(list of function instances) during serialization/deserialization process. + openAIExecutionSettings!.FunctionChoiceBehavior = executionSettings.FunctionChoiceBehavior; + + return openAIExecutionSettings; } /// - /// Create a new settings object with the values from another settings object. + /// Clone the settings object. /// - /// Template configuration - /// Default max tokens - /// An instance of OpenAIPromptExecutionSettings - [Obsolete("This method is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions")] - public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(PromptExecutionSettings? executionSettings, int? defaultMaxTokens = null) + /// The type of the settings object to clone. + /// A new instance of the settings object. + protected internal T Clone() where T : OpenAIPromptExecutionSettings, new() { - var settings = FromExecutionSettings(executionSettings, defaultMaxTokens); - - if (settings.StopSequences?.Count == 0) + return new T() { - // Azure OpenAI WithData API does not allow to send empty array of stop sequences - // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0" - settings.StopSequences = null; - } - - return settings; + ModelId = this.ModelId, + ExtensionData = this.ExtensionData is not null ? 
new Dictionary(this.ExtensionData) : null, + Temperature = this.Temperature, + TopP = this.TopP, + PresencePenalty = this.PresencePenalty, + FrequencyPenalty = this.FrequencyPenalty, + MaxTokens = this.MaxTokens, + StopSequences = this.StopSequences is not null ? new List(this.StopSequences) : null, + Seed = this.Seed, + ResponseFormat = this.ResponseFormat, + TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary(this.TokenSelectionBiases) : null, + ToolCallBehavior = this.ToolCallBehavior, + FunctionChoiceBehavior = this.FunctionChoiceBehavior, + User = this.User, + ChatSystemPrompt = this.ChatSystemPrompt, + Logprobs = this.Logprobs, + TopLogprobs = this.TopLogprobs + }; } #region private ================================================================================ - private double _temperature = 1; - private double _topP = 1; - private double _presencePenalty; - private double _frequencyPenalty; + private double? _temperature; + private double? _topP; + private double? _presencePenalty; + private double? _frequencyPenalty; private int? _maxTokens; private IList? _stopSequences; - private int _resultsPerPrompt = 1; private long? _seed; private object? _responseFormat; private IDictionary? _tokenSelectionBiases; @@ -426,7 +388,6 @@ public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(Prompt private string? _chatSystemPrompt; private bool? _logprobs; private int? _topLogprobs; - private AzureChatExtensionsOptions? 
_azureChatExtensionsOptions; #endregion } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs similarity index 84% rename from dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs rename to dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs index ddb97ff93c35..cfb9cfa39dd0 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/OpenAITextToAudioExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToAudioExecutionSettings.cs @@ -1,6 +1,5 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Text.Json; @@ -34,7 +33,8 @@ public string Voice /// The format to audio in. Supported formats are mp3, opus, aac, and flac. /// [JsonPropertyName("response_format")] - public string ResponseFormat + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ResponseFormat { get => this._responseFormat; @@ -49,7 +49,8 @@ public string ResponseFormat /// The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. /// [JsonPropertyName("speed")] - public float Speed + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public float? Speed { get => this._speed; @@ -72,9 +73,9 @@ public OpenAITextToAudioExecutionSettings() /// Creates an instance of class. /// /// The voice to use when generating the audio. Supported voices are alloy, echo, fable, onyx, nova, and shimmer. - public OpenAITextToAudioExecutionSettings(string voice) + public OpenAITextToAudioExecutionSettings(string? voice) { - this._voice = voice; + this._voice = voice ?? DefaultVoice; } /// @@ -94,7 +95,7 @@ public override PromptExecutionSettings Clone() /// /// Instance of . /// Instance of . 
- public static OpenAITextToAudioExecutionSettings? FromExecutionSettings(PromptExecutionSettings? executionSettings) + public static OpenAITextToAudioExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings) { if (executionSettings is null) { @@ -110,20 +111,15 @@ public override PromptExecutionSettings Clone() var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive); - if (openAIExecutionSettings is not null) - { - return openAIExecutionSettings; - } - - throw new ArgumentException($"Invalid execution settings, cannot convert to {nameof(OpenAITextToAudioExecutionSettings)}", nameof(executionSettings)); + return openAIExecutionSettings!; } #region private ================================================================================ private const string DefaultVoice = "alloy"; - private float _speed = 1.0f; - private string _responseFormat = "mp3"; + private float? _speed; + private string? _responseFormat; private string _voice; #endregion diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToImageExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToImageExecutionSettings.cs new file mode 100644 index 000000000000..13e8a6b74b1f --- /dev/null +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAITextToImageExecutionSettings.cs @@ -0,0 +1,201 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Text; +using OpenAI.Images; + +namespace Microsoft.SemanticKernel.Connectors.OpenAI; + +/// +/// Text to image execution settings for an OpenAI image generation request. +/// +[JsonNumberHandling(JsonNumberHandling.AllowReadingFromString)] +public sealed class OpenAITextToImageExecutionSettings : PromptExecutionSettings +{ + /// + /// Optional width and height of the generated image. 
+ /// + /// + /// + /// Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2 model. + /// Must be one of 1024x1024, 1792x1024, 1024x1792 for dall-e-3 model. + /// + /// + public (int Width, int Height)? Size + { + get => this._size; + + set + { + this.ThrowIfFrozen(); + this._size = value; + } + } + + /// + /// The quality of the image that will be generated. + /// + /// + /// Must be one of standard or hd or high. + /// + /// standard: creates images with standard quality. This is the default. + /// hd OR high: creates images with finer details and greater consistency. + /// + /// This param is only supported for dall-e-3 model. + /// + [JsonPropertyName("quality")] + public string? Quality + { + get => this._quality; + + set + { + this.ThrowIfFrozen(); + this._quality = value; + } + } + + /// + /// The style of the generated images. + /// + /// + /// Must be one of vivid or natural. + /// + /// vivid: causes the model to lean towards generating hyper-real and dramatic images. + /// natural: causes the model to produce more natural, less hyper-real looking images. + /// + /// This param is only supported for dall-e-3 model. + /// + [JsonPropertyName("style")] + public string? Style + { + get => this._style; + + set + { + this.ThrowIfFrozen(); + this._style = value; + } + } + + /// + /// The format of the generated images. + /// Can be a or a string where: + /// + /// : causes the model to generated in the provided format + /// url OR uri: causes the model to return an url for the generated images. + /// b64_json or bytes: causes the model to return in a Base64 format the content of the images. + /// + /// + [JsonPropertyName("response_format")] + public object? ResponseFormat + { + get => this._responseFormat; + set + { + this.ThrowIfFrozen(); + this._responseFormat = value; + } + } + + /// + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. + /// + [JsonPropertyName("user")] + public string? 
EndUserId + { + get => this._endUserId; + set + { + this.ThrowIfFrozen(); + this._endUserId = value; + } + } + + /// + public override void Freeze() + { + if (this.IsFrozen) + { + return; + } + + base.Freeze(); + } + + /// + public override PromptExecutionSettings Clone() + { + return new OpenAITextToImageExecutionSettings() + { + ModelId = this.ModelId, + ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null, + Size = this.Size + }; + } + + /// + /// Create a new settings object with the values from another settings object. + /// + /// Template configuration + /// An instance of OpenAIPromptExecutionSettings + public static OpenAITextToImageExecutionSettings FromExecutionSettings(PromptExecutionSettings? executionSettings) + { + if (executionSettings is null) + { + return new OpenAITextToImageExecutionSettings(); + } + + if (executionSettings is OpenAITextToImageExecutionSettings settings) + { + return settings; + } + + var json = JsonSerializer.Serialize(executionSettings); + var openAIExecutionSettings = JsonSerializer.Deserialize(json, JsonOptionsCache.ReadPermissive)!; + if (openAIExecutionSettings.ExtensionData?.TryGetValue("width", out var width) ?? false) + { + openAIExecutionSettings.Width = ((JsonElement)width).GetInt32(); + } + if (openAIExecutionSettings.ExtensionData?.TryGetValue("height", out var height) ?? false) + { + openAIExecutionSettings.Height = ((JsonElement)height).GetInt32(); + } + + return openAIExecutionSettings!; + } + + #region private ================================================================================ + + [JsonPropertyName("width")] + internal int? Width + { + get => this.Size?.Width; + set + { + if (!value.HasValue) { return; } + this.Size = (value.Value, this.Size?.Height ?? 0); + } + } + + [JsonPropertyName("height")] + internal int? Height + { + get => this.Size?.Height; + set + { + if (!value.HasValue) { return; } + this.Size = (this.Size?.Width ?? 
0, value.Value); + } + } + + private (int Width, int Height)? _size; + private string? _quality; + private string? _style; + private object? _responseFormat; + private string? _endUserId; + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs deleted file mode 100644 index 20111ca99f88..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/AzureOpenAITextGenerationService.cs +++ /dev/null @@ -1,97 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI text generation client. -/// -public sealed class AzureOpenAITextGenerationService : ITextGenerationService -{ - private readonly AzureOpenAIClientCore _core; - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - /// Creates a new client instance using API Key auth - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextGenerationService( - string deploymentName, - string endpoint, - string apiKey, - string? 
modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new(deploymentName, endpoint, apiKey, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - /// Creates a new client instance supporting AAD auth - /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextGenerationService( - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new(deploymentName, endpoint, credential, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - /// Creates a new client instance using the specified OpenAIClient - /// - /// Azure OpenAI model ID or deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom . - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextGenerationService( - string deploymentName, - OpenAIClient openAIClient, - string? modelId = null, - ILoggerFactory? 
loggerFactory = null) - { - this._core = new(deploymentName, openAIClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs deleted file mode 100644 index 1133865171fd..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextGeneration/OpenAITextGenerationService.cs +++ /dev/null @@ -1,77 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextGeneration; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text generation service. 
-/// -public sealed class OpenAITextGenerationService : ITextGenerationService -{ - private readonly OpenAIClientCore _core; - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - /// Create an instance of the OpenAI text generation connector - /// - /// Model name - /// OpenAI API Key - /// OpenAI Organization Id (usually optional) - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextGenerationService( - string modelId, - string apiKey, - string? organization = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._core = new( - modelId: modelId, - apiKey: apiKey, - organization: organization, - httpClient: httpClient, - logger: loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); - } - - /// - /// Create an instance of the OpenAI text generation connector - /// - /// Model name - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextGenerationService( - string modelId, - OpenAIClient openAIClient, - ILoggerFactory? loggerFactory = null) - { - this._core = new(modelId, openAIClient, loggerFactory?.CreateLogger(typeof(OpenAITextGenerationService))); - - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetTextResultsAsync(prompt, executionSettings, kernel, cancellationToken); - } - - /// - public IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? 
kernel = null, CancellationToken cancellationToken = default) - { - return this._core.GetStreamingTextContentsAsync(prompt, executionSettings, kernel, cancellationToken); - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs deleted file mode 100644 index 47aac090ab05..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/AzureOpenAITextToAudioService.cs +++ /dev/null @@ -1,63 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextToAudio; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI text-to-audio service. -/// -[Experimental("SKEXP0001")] -public sealed class AzureOpenAITextToAudioService : ITextToAudioService -{ - /// - /// Azure OpenAI text-to-audio client for HTTP operations. - /// - private readonly AzureOpenAITextToAudioClient _client; - - /// - public IReadOnlyDictionary Attributes => this._client.Attributes; - - /// - /// Gets the key used to store the deployment name in the dictionary. - /// - public static string DeploymentNameKey => "DeploymentName"; - - /// - /// Creates an instance of the connector with API key auth. 
- /// - /// Azure OpenAI deployment name, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Azure OpenAI deployment URL, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI API key, see https://learn.microsoft.com/azure/cognitive-services/openai/quickstart - /// Azure OpenAI model id, see https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public AzureOpenAITextToAudioService( - string deploymentName, - string endpoint, - string apiKey, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - this._client = new(deploymentName, endpoint, apiKey, modelId, httpClient, loggerFactory?.CreateLogger(typeof(AzureOpenAITextToAudioService))); - - this._client.AddAttribute(DeploymentNameKey, deploymentName); - this._client.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - /// - public Task> GetAudioContentsAsync( - string text, - PromptExecutionSettings? executionSettings = null, - Kernel? kernel = null, - CancellationToken cancellationToken = default) - => this._client.GetAudioContentsAsync(text, executionSettings, cancellationToken); -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs deleted file mode 100644 index bc7aeede3b57..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToAudio/TextToAudioRequest.cs +++ /dev/null @@ -1,26 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text-to-audio request model, see . 
-/// -internal sealed class TextToAudioRequest(string model, string input, string voice) -{ - [JsonPropertyName("model")] - public string Model { get; set; } = model; - - [JsonPropertyName("input")] - public string Input { get; set; } = input; - - [JsonPropertyName("voice")] - public string Voice { get; set; } = voice; - - [JsonPropertyName("response_format")] - public string ResponseFormat { get; set; } = "mp3"; - - [JsonPropertyName("speed")] - public float Speed { get; set; } = 1.0f; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs deleted file mode 100644 index efa3ffcc87c0..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/AzureOpenAITextToImageService.cs +++ /dev/null @@ -1,212 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Azure; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextToImage; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Azure OpenAI Image generation -/// -/// -[Experimental("SKEXP0010")] -public sealed class AzureOpenAITextToImageService : ITextToImageService -{ - private readonly OpenAIClient _client; - private readonly ILogger _logger; - private readonly string _deploymentName; - private readonly Dictionary _attributes = []; - - /// - public IReadOnlyDictionary Attributes => this._attributes; - - /// - /// Gets the key used to store the deployment name in the dictionary. 
- /// - public static string DeploymentNameKey => "DeploymentName"; - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// Azure OpenAI deployment URL - /// Azure OpenAI API key - /// Model identifier - /// Custom for HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAITextToImageService( - string deploymentName, - string endpoint, - string apiKey, - string? modelId, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null, - string? apiVersion = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? NullLogger.Instance; - - var connectorEndpoint = (!string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri) ?? - throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); - - this._client = new(new Uri(connectorEndpoint), - new AzureKeyCredential(apiKey), - GetClientOptions(httpClient, apiVersion)); - } - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// Azure OpenAI deployment URL - /// Token credentials, e.g. DefaultAzureCredential, ManagedIdentityCredential, EnvironmentCredential, etc. - /// Model identifier - /// Custom for HTTP requests. - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. 
- /// Azure OpenAI Endpoint ApiVersion - public AzureOpenAITextToImageService( - string deploymentName, - string endpoint, - TokenCredential credential, - string? modelId, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null, - string? apiVersion = null) - { - Verify.NotNull(credential); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? NullLogger.Instance; - - var connectorEndpoint = !string.IsNullOrWhiteSpace(endpoint) ? endpoint! : httpClient?.BaseAddress?.AbsoluteUri; - if (connectorEndpoint is null) - { - throw new ArgumentException($"The {nameof(httpClient)}.{nameof(HttpClient.BaseAddress)} and {nameof(endpoint)} are both null or empty. Please ensure at least one is provided."); - } - - this._client = new(new Uri(connectorEndpoint), - credential, - GetClientOptions(httpClient, apiVersion)); - } - - /// - /// Create a new instance of Azure OpenAI image generation service - /// - /// Deployment name identifier - /// to use for the service. - /// Model identifier - /// The ILoggerFactory used to create a logger for logging. If null, no logging will be performed. - public AzureOpenAITextToImageService( - string deploymentName, - OpenAIClient openAIClient, - string? modelId, - ILoggerFactory? loggerFactory = null) - { - Verify.NotNull(openAIClient); - Verify.NotNullOrWhiteSpace(deploymentName); - - this._deploymentName = deploymentName; - - if (modelId is not null) - { - this.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - this.AddAttribute(DeploymentNameKey, deploymentName); - - this._logger = loggerFactory?.CreateLogger(typeof(AzureOpenAITextToImageService)) ?? 
NullLogger.Instance; - - this._client = openAIClient; - } - - /// - public async Task GenerateImageAsync( - string description, - int width, - int height, - Kernel? kernel = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - - var size = (width, height) switch - { - (1024, 1024) => ImageSize.Size1024x1024, - (1792, 1024) => ImageSize.Size1792x1024, - (1024, 1792) => ImageSize.Size1024x1792, - _ => throw new NotSupportedException("Dall-E 3 can only generate images of the following sizes 1024x1024, 1792x1024, or 1024x1792") - }; - - Response imageGenerations; - try - { - imageGenerations = await this._client.GetImageGenerationsAsync( - new ImageGenerationOptions - { - DeploymentName = this._deploymentName, - Prompt = description, - Size = size, - }, cancellationToken).ConfigureAwait(false); - } - catch (RequestFailedException e) - { - throw e.ToHttpOperationException(); - } - - if (!imageGenerations.HasValue) - { - throw new KernelException("The response does not contain an image result"); - } - - if (imageGenerations.Value.Data.Count == 0) - { - throw new KernelException("The response does not contain any image"); - } - - return imageGenerations.Value.Data[0].Url.AbsoluteUri; - } - - private static OpenAIClientOptions GetClientOptions(HttpClient? httpClient, string? apiVersion) => - ClientCore.GetOpenAIClientOptions(httpClient, apiVersion switch - { - // DALL-E 3 is supported in the latest API releases - _ => OpenAIClientOptions.ServiceVersion.V2024_02_15_Preview - }); - - internal void AddAttribute(string key, string? 
value) - { - if (!string.IsNullOrEmpty(value)) - { - this._attributes.Add(key, value); - } - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs deleted file mode 100644 index 335fe8cad5ee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs +++ /dev/null @@ -1,117 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Services; -using Microsoft.SemanticKernel.TextToImage; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// OpenAI text to image service. -/// -[Experimental("SKEXP0010")] -public sealed class OpenAITextToImageService : ITextToImageService -{ - private readonly OpenAITextToImageClientCore _core; - - /// - /// OpenAI REST API endpoint - /// - private const string OpenAIEndpoint = "https://api.openai.com/v1/images/generations"; - - /// - /// Optional value for the OpenAI-Organization header. - /// - private readonly string? _organizationHeaderValue; - - /// - /// Value for the authorization header. - /// - private readonly string _authorizationHeaderValue; - - /// - /// The model to use for image generation. - /// - private readonly string? _modelId; - - /// - /// Initializes a new instance of the class. - /// - /// OpenAI API key, see https://platform.openai.com/account/api-keys - /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations. - /// The model to use for image generation. - /// Custom for HTTP requests. - /// The to use for logging. If null, no logging will be performed. - public OpenAITextToImageService( - string apiKey, - string? 
organization = null, - string? modelId = null, - HttpClient? httpClient = null, - ILoggerFactory? loggerFactory = null) - { - Verify.NotNullOrWhiteSpace(apiKey); - this._authorizationHeaderValue = $"Bearer {apiKey}"; - this._organizationHeaderValue = organization; - this._modelId = modelId; - - this._core = new(httpClient, loggerFactory?.CreateLogger(this.GetType())); - this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization); - if (modelId is not null) - { - this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId); - } - - this._core.RequestCreated += (_, request) => - { - request.Headers.Add("Authorization", this._authorizationHeaderValue); - if (!string.IsNullOrEmpty(this._organizationHeaderValue)) - { - request.Headers.Add("OpenAI-Organization", this._organizationHeaderValue); - } - }; - } - - /// - public IReadOnlyDictionary Attributes => this._core.Attributes; - - /// - public Task GenerateImageAsync(string description, int width, int height, Kernel? kernel = null, CancellationToken cancellationToken = default) - { - Verify.NotNull(description); - if (width != height || (width != 256 && width != 512 && width != 1024)) - { - throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024."); - } - - return this.GenerateImageAsync(this._modelId, description, width, height, "url", x => x.Url, cancellationToken); - } - - private async Task GenerateImageAsync( - string? 
model, - string description, - int width, int height, - string format, Func extractResponse, - CancellationToken cancellationToken) - { - Verify.NotNull(extractResponse); - - var requestBody = JsonSerializer.Serialize(new TextToImageRequest - { - Model = model, - Prompt = description, - Size = $"{width}x{height}", - Count = 1, - Format = format, - }); - - var list = await this._core.ExecuteImageGenerationRequestAsync(OpenAIEndpoint, requestBody, extractResponse!, cancellationToken).ConfigureAwait(false); - return list[0]; - } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs deleted file mode 100644 index 70b5ac5418ee..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs +++ /dev/null @@ -1,42 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Text to image request -/// -internal sealed class TextToImageRequest -{ - /// - /// Model to use for image generation - /// - [JsonPropertyName("model")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
Model { get; set; } - - /// - /// Image prompt - /// - [JsonPropertyName("prompt")] - public string Prompt { get; set; } = string.Empty; - - /// - /// Image size - /// - [JsonPropertyName("size")] - public string Size { get; set; } = "256x256"; - - /// - /// How many images to generate - /// - [JsonPropertyName("n")] - public int Count { get; set; } = 1; - - /// - /// Image format, "url" or "b64_json" - /// - [JsonPropertyName("response_format")] - public string Format { get; set; } = "url"; -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs deleted file mode 100644 index cba10ba14331..000000000000 --- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageResponse.cs +++ /dev/null @@ -1,44 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Connectors.OpenAI; - -/// -/// Text to image response -/// -internal sealed class TextToImageResponse -{ - /// - /// OpenAI Image response - /// - public sealed class Image - { - /// - /// URL to the image created - /// - [JsonPropertyName("url")] - [SuppressMessage("Design", "CA1056:URI return values should not be strings", Justification = "Using the original value")] - public string Url { get; set; } = string.Empty; - - /// - /// Image content in base64 format - /// - [JsonPropertyName("b64_json")] - public string AsBase64 { get; set; } = string.Empty; - } - - /// - /// List of possible images - /// - [JsonPropertyName("data")] - public IList Images { get; set; } = []; - - /// - /// Creation time - /// - [JsonPropertyName("created")] - public int CreatedTime { get; set; } -} diff --git a/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs index 7a5490c736ea..0cad674c887e 
100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/ToolCallBehavior.cs @@ -4,9 +4,10 @@ using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; using System.Linq; using System.Text.Json; -using Azure.AI.OpenAI; +using OpenAI.Chat; namespace Microsoft.SemanticKernel.Connectors.OpenAI; @@ -94,6 +95,7 @@ private ToolCallBehavior(bool autoInvoke) /// Options to control tool call result serialization behavior. /// [Obsolete("This property is deprecated in favor of Kernel.SerializerOptions that will be introduced in one of the following releases.")] + [ExcludeFromCodeCoverage] [EditorBrowsable(EditorBrowsableState.Never)] public virtual JsonSerializerOptions? ToolCallResultSerializerOptions { get; set; } @@ -118,10 +120,9 @@ private ToolCallBehavior(bool autoInvoke) /// true if it's ok to invoke any kernel function requested by the model if it's found; false if a request needs to be validated against an allow list. internal virtual bool AllowAnyRequestedKernelFunction => false; - /// Configures the with any tools this provides. - /// The used for the operation. This can be queried to determine what tools to provide into the . - /// The destination to configure. - internal abstract void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options); + /// Returns list of available tools and the way model should use them. + /// The used for the operation. This can be queried to determine what tools to return. + internal abstract (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel); /// /// Represents a that will provide to the model all available functions from a @@ -133,8 +134,11 @@ internal KernelFunctions(bool autoInvoke) : base(autoInvoke) { } public override string ToString() => $"{nameof(KernelFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0})"; - internal override void ConfigureOptions(Kernel? 
kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { + ChatToolChoice? choice = null; + List? tools = null; + // If no kernel is provided, we don't have any tools to provide. if (kernel is not null) { @@ -142,13 +146,16 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o IList functions = kernel.Plugins.GetFunctionsMetadata(); if (functions.Count > 0) { - options.ToolChoice = ChatCompletionsToolChoice.Auto; + choice = ChatToolChoice.CreateAutoChoice(); + tools = []; for (int i = 0; i < functions.Count; i++) { - options.Tools.Add(new ChatCompletionsFunctionToolDefinition(functions[i].ToOpenAIFunction().ToFunctionDefinition())); + tools.Add(functions[i].ToOpenAIFunction().ToFunctionDefinition()); } } } + + return (tools, choice); } internal override bool AllowAnyRequestedKernelFunction => true; @@ -160,26 +167,29 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o internal sealed class EnabledFunctions : ToolCallBehavior { private readonly OpenAIFunction[] _openAIFunctions; - private readonly ChatCompletionsFunctionToolDefinition[] _functions; + private readonly ChatTool[] _functions; public EnabledFunctions(IEnumerable functions, bool autoInvoke) : base(autoInvoke) { this._openAIFunctions = functions.ToArray(); - var defs = new ChatCompletionsFunctionToolDefinition[this._openAIFunctions.Length]; + var defs = new ChatTool[this._openAIFunctions.Length]; for (int i = 0; i < defs.Length; i++) { - defs[i] = new ChatCompletionsFunctionToolDefinition(this._openAIFunctions[i].ToFunctionDefinition()); + defs[i] = this._openAIFunctions[i].ToFunctionDefinition(); } this._functions = defs; } - public override string ToString() => $"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.Name))}"; + public override string ToString() => 
$"{nameof(EnabledFunctions)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {string.Join(", ", this._functions.Select(f => f.FunctionName))}"; - internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { + ChatToolChoice? choice = null; + List? tools = null; + OpenAIFunction[] openAIFunctions = this._openAIFunctions; - ChatCompletionsFunctionToolDefinition[] functions = this._functions; + ChatTool[] functions = this._functions; Debug.Assert(openAIFunctions.Length == functions.Length); if (openAIFunctions.Length > 0) @@ -196,7 +206,8 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o throw new KernelException($"Auto-invocation with {nameof(EnabledFunctions)} is not supported when no kernel is provided."); } - options.ToolChoice = ChatCompletionsToolChoice.Auto; + choice = ChatToolChoice.CreateAutoChoice(); + tools = []; for (int i = 0; i < openAIFunctions.Length; i++) { // Make sure that if auto-invocation is specified, every enabled function can be found in the kernel. @@ -211,9 +222,11 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o } // Add the function. - options.Tools.Add(functions[i]); + tools.Add(functions[i]); } } + + return (tools, choice); } } @@ -221,19 +234,19 @@ internal override void ConfigureOptions(Kernel? 
kernel, ChatCompletionsOptions o internal sealed class RequiredFunction : ToolCallBehavior { private readonly OpenAIFunction _function; - private readonly ChatCompletionsFunctionToolDefinition _tool; - private readonly ChatCompletionsToolChoice _choice; + private readonly ChatTool _tool; + private readonly ChatToolChoice _choice; public RequiredFunction(OpenAIFunction function, bool autoInvoke) : base(autoInvoke) { this._function = function; - this._tool = new ChatCompletionsFunctionToolDefinition(function.ToFunctionDefinition()); - this._choice = new ChatCompletionsToolChoice(this._tool); + this._tool = function.ToFunctionDefinition(); + this._choice = ChatToolChoice.CreateFunctionChoice(this._tool.FunctionName); } - public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.Name}"; + public override string ToString() => $"{nameof(RequiredFunction)}(autoInvoke:{this.MaximumAutoInvokeAttempts != 0}): {this._tool.FunctionName}"; - internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions options) + internal override (IList? Tools, ChatToolChoice? Choice) ConfigureOptions(Kernel? kernel) { bool autoInvoke = base.MaximumAutoInvokeAttempts > 0; @@ -253,8 +266,7 @@ internal override void ConfigureOptions(Kernel? kernel, ChatCompletionsOptions o throw new KernelException($"The specified {nameof(RequiredFunction)} function {this._function.FullyQualifiedName} is not available in the kernel."); } - options.ToolChoice = this._choice; - options.Tools.Add(this._tool); + return ([this._tool], this._choice); } /// Gets how many requests are part of a single interaction should include this tool in the request. 
diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..637e276c82c1 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantGenericDataModelMapperTests.cs @@ -0,0 +1,405 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; +using Qdrant.Client.Grpc; +using Xunit; + +namespace Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests; + +/// +/// Contains tests for the class. +/// +public class QdrantGenericDataModelMapperTests +{ + private static readonly VectorStoreRecordDefinition s_singleVectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + private static readonly VectorStoreRecordDefinition s_multiVectorStoreRecordDefinition = new() + { + Properties = 
new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + private static readonly float[] s_vector1 = new float[] { 1.0f, 2.0f, 3.0f }; + private static readonly float[] s_vector2 = new float[] { 4.0f, 5.0f, 6.0f }; + private static readonly string[] s_taglist = new string[] { "tag1", "tag2" }; + private const string TestGuidKeyString = "11111111-1111-1111-1111-111111111111"; + private static readonly Guid s_testGuidKey = Guid.Parse(TestGuidKeyString); + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromDataToStorageModelMapsAllSupportedTypes(bool hasNamedVectors) + { + // Arrange. + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), hasNamedVectors ? 
s_multiVectorStoreRecordDefinition : s_singleVectorStoreRecordDefinition, null); + var sut = new QdrantGenericDataModelMapper(reader, hasNamedVectors); + var dataModel = new VectorStoreGenericDataModel(1ul) + { + Data = + { + ["StringDataProp"] = "string", + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["FloatDataProp"] = 5.0f, + ["NullableFloatDataProp"] = 6.0f, + ["DoubleDataProp"] = 7.0, + ["NullableDoubleDataProp"] = 8.0, + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["TagListDataProp"] = s_taglist, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_vector1), + }, + }; + + if (hasNamedVectors) + { + dataModel.Vectors.Add("NullableFloatVector", new ReadOnlyMemory(s_vector2)); + } + + // Act. + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(1ul, storageModel.Id.Num); + Assert.Equal("string", (string?)storageModel.Payload["StringDataProp"].StringValue); + Assert.Equal(1, (int?)storageModel.Payload["IntDataProp"].IntegerValue); + Assert.Equal(2, (int?)storageModel.Payload["NullableIntDataProp"].IntegerValue); + Assert.Equal(3L, (long?)storageModel.Payload["LongDataProp"].IntegerValue); + Assert.Equal(4L, (long?)storageModel.Payload["NullableLongDataProp"].IntegerValue); + Assert.Equal(5.0f, (float?)storageModel.Payload["FloatDataProp"].DoubleValue); + Assert.Equal(6.0f, (float?)storageModel.Payload["NullableFloatDataProp"].DoubleValue); + Assert.Equal(7.0, (double?)storageModel.Payload["DoubleDataProp"].DoubleValue); + Assert.Equal(8.0, (double?)storageModel.Payload["NullableDoubleDataProp"].DoubleValue); + Assert.Equal(true, (bool?)storageModel.Payload["BoolDataProp"].BoolValue); + Assert.Equal(false, (bool?)storageModel.Payload["NullableBoolDataProp"].BoolValue); + Assert.Equal(s_taglist, storageModel.Payload["TagListDataProp"].ListValue.Values.Select(x => x.StringValue).ToArray()); + + if (hasNamedVectors) + { + 
Assert.Equal(s_vector1, storageModel.Vectors.Vectors_.Vectors["FloatVector"].Data.ToArray()); + Assert.Equal(s_vector2, storageModel.Vectors.Vectors_.Vectors["NullableFloatVector"].Data.ToArray()); + } + else + { + Assert.Equal(s_vector1, storageModel.Vectors.Vector.Data.ToArray()); + } + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromDataToStorageModelMapsNullValues(bool hasNamedVectors) + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel(s_testGuidKey) + { + Data = + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + ["NullableTagListDataProp"] = null, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_vector1), + }, + }; + + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); + var sut = (IVectorStoreRecordMapper, PointStruct>)new QdrantGenericDataModelMapper(reader, hasNamedVectors); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(TestGuidKeyString, storageModel.Id.Uuid); + Assert.True(storageModel.Payload["StringDataProp"].HasNullValue); + Assert.True(storageModel.Payload["NullableIntDataProp"].HasNullValue); + Assert.True(storageModel.Payload["NullableTagListDataProp"].HasNullValue); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromStorageToDataModelMapsAllSupportedTypes(bool hasNamedVectors) + { + // Arrange + var reader = new 
VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), hasNamedVectors ? s_multiVectorStoreRecordDefinition : s_singleVectorStoreRecordDefinition, null); + var sut = new QdrantGenericDataModelMapper(reader, hasNamedVectors); + var storageModel = new PointStruct() + { + Id = new PointId() { Num = 1 }, + Payload = + { + ["StringDataProp"] = new Value() { StringValue = "string" }, + ["IntDataProp"] = new Value() { IntegerValue = 1 }, + ["NullableIntDataProp"] = new Value() { IntegerValue = 2 }, + ["LongDataProp"] = new Value() { IntegerValue = 3 }, + ["NullableLongDataProp"] = new Value() { IntegerValue = 4 }, + ["FloatDataProp"] = new Value() { DoubleValue = 5.0 }, + ["NullableFloatDataProp"] = new Value() { DoubleValue = 6.0 }, + ["DoubleDataProp"] = new Value() { DoubleValue = 7.0 }, + ["NullableDoubleDataProp"] = new Value() { DoubleValue = 8.0 }, + ["BoolDataProp"] = new Value() { BoolValue = true }, + ["NullableBoolDataProp"] = new Value() { BoolValue = false }, + ["TagListDataProp"] = new Value() + { + ListValue = new ListValue() + { + Values = + { + new Value() { StringValue = "tag1" }, + new Value() { StringValue = "tag2" }, + }, + }, + }, + }, + Vectors = new Vectors() + }; + + if (hasNamedVectors) + { + storageModel.Vectors.Vectors_ = new NamedVectors(); + storageModel.Vectors.Vectors_.Vectors.Add("FloatVector", new Vector() { Data = { 1.0f, 2.0f, 3.0f } }); + storageModel.Vectors.Vectors_.Vectors.Add("NullableFloatVector", new Vector() { Data = { 4.0f, 5.0f, 6.0f } }); + } + else + { + storageModel.Vectors.Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } }; + } + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions()); + + // Assert + Assert.Equal(1ul, dataModel.Key); + Assert.Equal("string", (string?)dataModel.Data["StringDataProp"]); + Assert.Equal(1, (int?)dataModel.Data["IntDataProp"]); + Assert.Equal(2, (int?)dataModel.Data["NullableIntDataProp"]); + Assert.Equal(3L, 
(long?)dataModel.Data["LongDataProp"]); + Assert.Equal(4L, (long?)dataModel.Data["NullableLongDataProp"]); + Assert.Equal(5.0f, (float?)dataModel.Data["FloatDataProp"]); + Assert.Equal(6.0f, (float?)dataModel.Data["NullableFloatDataProp"]); + Assert.Equal(7.0, (double?)dataModel.Data["DoubleDataProp"]); + Assert.Equal(8.0, (double?)dataModel.Data["NullableDoubleDataProp"]); + Assert.Equal(true, (bool?)dataModel.Data["BoolDataProp"]); + Assert.Equal(false, (bool?)dataModel.Data["NullableBoolDataProp"]); + Assert.Equal(s_taglist, (string[]?)dataModel.Data["TagListDataProp"]); + + if (hasNamedVectors) + { + Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); + Assert.Equal(s_vector2, ((ReadOnlyMemory?)dataModel.Vectors["NullableFloatVector"])!.Value.ToArray()); + } + else + { + Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); + } + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapFromStorageToDataModelMapsNullValues(bool hasNamedVectors) + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(Guid)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("NullableTagListDataProp", typeof(string[])), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var storageModel = new PointStruct() + { + Id = new PointId() { Uuid = TestGuidKeyString }, + Payload = + { + ["StringDataProp"] = new Value() { NullValue = new NullValue() }, + ["NullableIntDataProp"] = new Value() { NullValue = new NullValue() }, + ["NullableTagListDataProp"] = new Value() { NullValue = new NullValue() }, + }, + Vectors = new Vectors() + }; + + if (hasNamedVectors) + { + storageModel.Vectors.Vectors_ = new NamedVectors(); 
+ storageModel.Vectors.Vectors_.Vectors.Add("FloatVector", new Vector() { Data = { 1.0f, 2.0f, 3.0f } }); + } + else + { + storageModel.Vectors.Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } }; + } + + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); + var sut = (IVectorStoreRecordMapper, PointStruct>)new QdrantGenericDataModelMapper(reader, hasNamedVectors); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions()); + + // Assert + Assert.Equal(s_testGuidKey, dataModel.Key); + Assert.Null(dataModel.Data["StringDataProp"]); + Assert.Null(dataModel.Data["NullableIntDataProp"]); + Assert.Null(dataModel.Data["NullableTagListDataProp"]); + Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelThrowsForInvalidVectorType() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(ulong)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); + var sut = new QdrantGenericDataModelMapper(reader, false); + + var dataModel = new VectorStoreGenericDataModel(1ul) + { + Vectors = + { + ["FloatVector"] = "not a vector", + }, + }; + + // Act + var exception = Assert.Throws(() => sut.MapFromDataToStorageModel(dataModel)); + + // Assert + Assert.Equal("Vector property 'FloatVector' on provided record of type VectorStoreGenericDataModel must be of type ReadOnlyMemory and not null.", exception.Message); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new 
VectorStoreRecordKeyProperty("Key", typeof(ulong)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); + var sut = new QdrantGenericDataModelMapper(reader, false); + + var dataModel = new VectorStoreGenericDataModel(1ul) + { + Vectors = { ["FloatVector"] = new ReadOnlyMemory(s_vector1) }, + }; + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(1ul, storageModel.Id.Num); + Assert.False(storageModel.Payload.ContainsKey("StringDataProp")); + Assert.Equal(s_vector1, storageModel.Vectors.Vector.Data.ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(ulong)), + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + }, + }; + + var reader = new VectorStoreRecordPropertyReader(typeof(VectorStoreGenericDataModel), vectorStoreRecordDefinition, null); + var sut = new QdrantGenericDataModelMapper(reader, false); + + var storageModel = new PointStruct() + { + Id = new PointId() { Num = 1 }, + Vectors = new Vectors() + { + Vector = new Vector() { Data = { 1.0f, 2.0f, 3.0f } } + }, + }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new() { IncludeVectors = true }); + + // Assert + Assert.Equal(1ul, dataModel.Key); + Assert.False(dataModel.Data.ContainsKey("StringDataProp")); + Assert.Equal(s_vector1, ((ReadOnlyMemory?)dataModel.Vectors["FloatVector"])!.Value.ToArray()); + } +} diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs 
b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs index 68ff1d46a86b..b871094cc1d5 100644 --- a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorStoreRecordMapperTests.cs @@ -23,7 +23,8 @@ public void MapsSinglePropsFromDataToStorageModelWithUlong(bool hasNamedVectors) { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); - var sut = new QdrantVectorStoreRecordMapper>(definition, hasNamedVectors, s_singlePropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); // Act. var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(5ul)); @@ -51,7 +52,8 @@ public void MapsSinglePropsFromDataToStorageModelWithGuid(bool hasNamedVectors) { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); - var sut = new QdrantVectorStoreRecordMapper>(definition, hasNamedVectors, s_singlePropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); // Act. var actual = sut.MapFromDataToStorageModel(CreateSinglePropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111"))); @@ -72,7 +74,8 @@ public void MapsSinglePropsFromStorageToDataModelWithUlong(bool hasNamedVectors, { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(ulong)); - var sut = new QdrantVectorStoreRecordMapper>(definition, hasNamedVectors, s_singlePropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); // Act. 
var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(5, hasNamedVectors), new() { IncludeVectors = includeVectors }); @@ -101,7 +104,8 @@ public void MapsSinglePropsFromStorageToDataModelWithGuid(bool hasNamedVectors, { // Arrange. var definition = CreateSinglePropsVectorStoreRecordDefinition(typeof(Guid)); - var sut = new QdrantVectorStoreRecordMapper>(definition, hasNamedVectors, s_singlePropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, hasNamedVectors); // Act. var actual = sut.MapFromStorageToDataModel(CreateSinglePropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111"), hasNamedVectors), new() { IncludeVectors = includeVectors }); @@ -126,7 +130,8 @@ public void MapsMultiPropsFromDataToStorageModelWithUlong() { // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong)); - var sut = new QdrantVectorStoreRecordMapper>(definition, true, s_multiPropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, true); // Act. var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(5ul)); @@ -151,7 +156,8 @@ public void MapsMultiPropsFromDataToStorageModelWithGuid() { // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid)); - var sut = new QdrantVectorStoreRecordMapper>(definition, true, s_multiPropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, true); // Act. var actual = sut.MapFromDataToStorageModel(CreateMultiPropsModel(Guid.Parse("11111111-1111-1111-1111-111111111111"))); @@ -178,7 +184,8 @@ public void MapsMultiPropsFromStorageToDataModelWithUlong(bool includeVectors) { // Arrange. 
var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(ulong)); - var sut = new QdrantVectorStoreRecordMapper>(definition, true, s_multiPropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, true); // Act. var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(5), new() { IncludeVectors = includeVectors }); @@ -213,7 +220,8 @@ public void MapsMultiPropsFromStorageToDataModelWithGuid(bool includeVectors) { // Arrange. var definition = CreateMultiPropsVectorStoreRecordDefinition(typeof(Guid)); - var sut = new QdrantVectorStoreRecordMapper>(definition, true, s_multiPropsModelStorageNamesMap); + var reader = new VectorStoreRecordPropertyReader(typeof(MultiPropsModel), definition, null); + var sut = new QdrantVectorStoreRecordMapper>(reader, true); // Act. var actual = sut.MapFromStorageToDataModel(CreateMultiPropsPointStruct(Guid.Parse("11111111-1111-1111-1111-111111111111")), new() { IncludeVectors = includeVectors }); @@ -340,99 +348,78 @@ private static void AddDataToMultiPropsPointStruct(PointStruct pointStruct) pointStruct.Vectors = new Vectors() { Vectors_ = namedVectors }; } - private static readonly Dictionary s_singlePropsModelStorageNamesMap = new() - { - { "Key", "key" }, - { "Data", "data" }, - { "Vector", "vector" }, - }; - private static VectorStoreRecordDefinition CreateSinglePropsVectorStoreRecordDefinition(Type keyType) => new() { Properties = new List { - new VectorStoreRecordKeyProperty("Key", keyType), - new VectorStoreRecordDataProperty("Data", typeof(string)), - new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)), + new VectorStoreRecordKeyProperty("Key", keyType) { StoragePropertyName = "key" }, + new VectorStoreRecordDataProperty("Data", typeof(string)) { StoragePropertyName = "data" }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName 
= "vector" }, }, }; private sealed class SinglePropsModel { - [VectorStoreRecordKey] + [VectorStoreRecordKey(StoragePropertyName = "key")] public TKey? Key { get; set; } = default; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "data")] public string Data { get; set; } = string.Empty; - [VectorStoreRecordVector] + [VectorStoreRecordVector(StoragePropertyName = "vector")] public ReadOnlyMemory? Vector { get; set; } public string NotAnnotated { get; set; } = string.Empty; } - private static readonly Dictionary s_multiPropsModelStorageNamesMap = new() - { - { "Key", "key" }, - { "DataString", "dataString" }, - { "DataInt", "dataInt" }, - { "DataLong", "dataLong" }, - { "DataFloat", "dataFloat" }, - { "DataDouble", "dataDouble" }, - { "DataBool", "dataBool" }, - { "DataArrayInt", "dataArrayInt" }, - { "Vector1", "vector1" }, - { "Vector2", "vector2" }, - }; - private static VectorStoreRecordDefinition CreateMultiPropsVectorStoreRecordDefinition(Type keyType) => new() { Properties = new List { - new VectorStoreRecordKeyProperty("Key", keyType), - new VectorStoreRecordDataProperty("DataString", typeof(string)), - new VectorStoreRecordDataProperty("DataInt", typeof(int)), - new VectorStoreRecordDataProperty("DataLong", typeof(long)), - new VectorStoreRecordDataProperty("DataFloat", typeof(float)), - new VectorStoreRecordDataProperty("DataDouble", typeof(double)), - new VectorStoreRecordDataProperty("DataBool", typeof(bool)), - new VectorStoreRecordDataProperty("DataArrayInt", typeof(List)), - new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)), - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)), + new VectorStoreRecordKeyProperty("Key", keyType) { StoragePropertyName = "key" }, + new VectorStoreRecordDataProperty("DataString", typeof(string)) { StoragePropertyName = "dataString" }, + new VectorStoreRecordDataProperty("DataInt", typeof(int)) { StoragePropertyName = "dataInt" }, + new 
VectorStoreRecordDataProperty("DataLong", typeof(long)) { StoragePropertyName = "dataLong" }, + new VectorStoreRecordDataProperty("DataFloat", typeof(float)) { StoragePropertyName = "dataFloat" }, + new VectorStoreRecordDataProperty("DataDouble", typeof(double)) { StoragePropertyName = "dataDouble" }, + new VectorStoreRecordDataProperty("DataBool", typeof(bool)) { StoragePropertyName = "dataBool" }, + new VectorStoreRecordDataProperty("DataArrayInt", typeof(List)) { StoragePropertyName = "dataArrayInt" }, + new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector1" }, + new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { StoragePropertyName = "vector2" }, }, }; private sealed class MultiPropsModel { - [VectorStoreRecordKey] + [VectorStoreRecordKey(StoragePropertyName = "key")] public TKey? Key { get; set; } = default; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataString")] public string DataString { get; set; } = string.Empty; [JsonPropertyName("data_int_json")] - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataInt")] public int DataInt { get; set; } = 0; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataLong")] public long DataLong { get; set; } = 0; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataFloat")] public float DataFloat { get; set; } = 0; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataDouble")] public double DataDouble { get; set; } = 0; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataBool")] public bool DataBool { get; set; } = false; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "dataArrayInt")] public List? DataArrayInt { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(StoragePropertyName = "vector1")] public ReadOnlyMemory? 
Vector1 { get; set; } - [VectorStoreRecordVector] + [VectorStoreRecordVector(StoragePropertyName = "vector2")] public ReadOnlyMemory? Vector2 { get; set; } public string NotAnnotated { get; set; } = string.Empty; diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..d21ca401d005 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetGenericDataModelMapperTests.cs @@ -0,0 +1,206 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using Microsoft.SemanticKernel.Data; +using StackExchange.Redis; +using Xunit; + +namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; + +/// +/// Contains tests for the class. +/// +public class RedisHashSetGenericDataModelMapperTests +{ + private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; + private static readonly double[] s_doubleVector = new double[] { 5.0d, 6.0d, 7.0d, 8.0d }; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange. + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringData"] = "data 1", + ["IntData"] = 1, + ["UIntData"] = 2u, + ["LongData"] = 3L, + ["ULongData"] = 4ul, + ["DoubleData"] = 5.5d, + ["FloatData"] = 6.6f, + ["BoolData"] = true, + ["NullableIntData"] = 7, + ["NullableUIntData"] = 8u, + ["NullableLongData"] = 9L, + ["NullableULongData"] = 10ul, + ["NullableDoubleData"] = 11.1d, + ["NullableFloatData"] = 12.2f, + ["NullableBoolData"] = false, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + }, + }; + + // Act. 
+ var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + RedisHashSetVectorStoreMappingTestHelpers.VerifyHashSet(storageModel.HashEntries); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringData"] = null, + ["NullableIntData"] = null, + }, + Vectors = + { + ["FloatVector"] = null, + }, + }; + + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + + Assert.Equal("storage_string_data", storageModel.HashEntries[0].Name.ToString()); + Assert.True(storageModel.HashEntries[0].Value.IsNull); + + Assert.Equal("NullableIntData", storageModel.HashEntries[1].Name.ToString()); + Assert.True(storageModel.HashEntries[1].Value.IsNull); + + Assert.Equal("FloatVector", storageModel.HashEntries[2].Name.ToString()); + Assert.True(storageModel.HashEntries[2].Value.IsNull); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange. + var hashSet = RedisHashSetVectorStoreMappingTestHelpers.CreateHashSet(); + + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert. 
+ Assert.Equal("key", dataModel.Key); + Assert.Equal("data 1", dataModel.Data["StringData"]); + Assert.Equal(1, dataModel.Data["IntData"]); + Assert.Equal(2u, dataModel.Data["UIntData"]); + Assert.Equal(3L, dataModel.Data["LongData"]); + Assert.Equal(4ul, dataModel.Data["ULongData"]); + Assert.Equal(5.5d, dataModel.Data["DoubleData"]); + Assert.Equal(6.6f, dataModel.Data["FloatData"]); + Assert.True((bool)dataModel.Data["BoolData"]!); + Assert.Equal(7, dataModel.Data["NullableIntData"]); + Assert.Equal(8u, dataModel.Data["NullableUIntData"]); + Assert.Equal(9L, dataModel.Data["NullableLongData"]); + Assert.Equal(10ul, dataModel.Data["NullableULongData"]); + Assert.Equal(11.1d, dataModel.Data["NullableDoubleData"]); + Assert.Equal(12.2f, dataModel.Data["NullableFloatData"]); + Assert.False((bool)dataModel.Data["NullableBoolData"]!); + Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal(new double[] { 5, 6, 7, 8 }, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + VectorStoreRecordDefinition vectorStoreRecordDefinition = new() + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)), + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory?)), + }, + }; + + var hashSet = new HashEntry[] + { + new("storage_string_data", RedisValue.Null), + new("NullableIntData", RedisValue.Null), + new("FloatVector", RedisValue.Null), + }; + + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + + // Act + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert + Assert.Equal("key", 
dataModel.Key); + Assert.Null(dataModel.Data["StringData"]); + Assert.Null(dataModel.Data["NullableIntData"]); + Assert.Null(dataModel.Vectors["FloatVector"]); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange. + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = { }, + Vectors = { }, + }; + + // Act. + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + Assert.Empty(storageModel.HashEntries); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange. + var hashSet = Array.Empty(); + + var sut = new RedisHashSetGenericDataModelMapper(RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition.Properties); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", hashSet), new() { IncludeVectors = true }); + + // Assert. + Assert.Equal("key", dataModel.Key); + Assert.Empty(dataModel.Data); + Assert.Empty(dataModel.Vectors); + } +} diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs new file mode 100644 index 000000000000..e42537f33b78 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreMappingTestHelpers.cs @@ -0,0 +1,119 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using Microsoft.SemanticKernel.Data; +using StackExchange.Redis; +using Xunit; + +namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; + +/// +/// Contains helper methods and data for testing the mapping of records between storage and data models. 
+/// These helpers are shared between the different mapping tests. +/// +internal static class RedisHashSetVectorStoreMappingTestHelpers +{ + public static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List() + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, + new VectorStoreRecordDataProperty("IntData", typeof(int)), + new VectorStoreRecordDataProperty("UIntData", typeof(uint)), + new VectorStoreRecordDataProperty("LongData", typeof(long)), + new VectorStoreRecordDataProperty("ULongData", typeof(ulong)), + new VectorStoreRecordDataProperty("DoubleData", typeof(double)), + new VectorStoreRecordDataProperty("FloatData", typeof(float)), + new VectorStoreRecordDataProperty("BoolData", typeof(bool)), + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordDataProperty("NullableUIntData", typeof(uint?)), + new VectorStoreRecordDataProperty("NullableLongData", typeof(long?)), + new VectorStoreRecordDataProperty("NullableULongData", typeof(ulong?)), + new VectorStoreRecordDataProperty("NullableDoubleData", typeof(double?)), + new VectorStoreRecordDataProperty("NullableFloatData", typeof(float?)), + new VectorStoreRecordDataProperty("NullableBoolData", typeof(bool?)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + } + }; + + public static HashEntry[] CreateHashSet() + { + var hashSet = new HashEntry[17]; + hashSet[0] = new HashEntry("storage_string_data", "data 1"); + hashSet[1] = new HashEntry("IntData", 1); + hashSet[2] = new HashEntry("UIntData", 2); + hashSet[3] = new HashEntry("LongData", 3); + hashSet[4] = new HashEntry("ULongData", 4); + hashSet[5] = new HashEntry("DoubleData", 5.5); + hashSet[6] = new HashEntry("FloatData", 6.6); + hashSet[7] 
= new HashEntry("BoolData", true); + hashSet[8] = new HashEntry("NullableIntData", 7); + hashSet[9] = new HashEntry("NullableUIntData", 8); + hashSet[10] = new HashEntry("NullableLongData", 9); + hashSet[11] = new HashEntry("NullableULongData", 10); + hashSet[12] = new HashEntry("NullableDoubleData", 11.1); + hashSet[13] = new HashEntry("NullableFloatData", 12.2); + hashSet[14] = new HashEntry("NullableBoolData", false); + hashSet[15] = new HashEntry("FloatVector", MemoryMarshal.AsBytes(new ReadOnlySpan(new float[] { 1, 2, 3, 4 })).ToArray()); + hashSet[16] = new HashEntry("DoubleVector", MemoryMarshal.AsBytes(new ReadOnlySpan(new double[] { 5, 6, 7, 8 })).ToArray()); + return hashSet; + } + + public static void VerifyHashSet(HashEntry[] hashEntries) + { + Assert.Equal("storage_string_data", hashEntries[0].Name.ToString()); + Assert.Equal("data 1", hashEntries[0].Value.ToString()); + + Assert.Equal("IntData", hashEntries[1].Name.ToString()); + Assert.Equal(1, (int)hashEntries[1].Value); + + Assert.Equal("UIntData", hashEntries[2].Name.ToString()); + Assert.Equal(2u, (uint)hashEntries[2].Value); + + Assert.Equal("LongData", hashEntries[3].Name.ToString()); + Assert.Equal(3, (long)hashEntries[3].Value); + + Assert.Equal("ULongData", hashEntries[4].Name.ToString()); + Assert.Equal(4ul, (ulong)hashEntries[4].Value); + + Assert.Equal("DoubleData", hashEntries[5].Name.ToString()); + Assert.Equal(5.5d, (double)hashEntries[5].Value); + + Assert.Equal("FloatData", hashEntries[6].Name.ToString()); + Assert.Equal(6.6f, (float)hashEntries[6].Value); + + Assert.Equal("BoolData", hashEntries[7].Name.ToString()); + Assert.True((bool)hashEntries[7].Value); + + Assert.Equal("NullableIntData", hashEntries[8].Name.ToString()); + Assert.Equal(7, (int)hashEntries[8].Value); + + Assert.Equal("NullableUIntData", hashEntries[9].Name.ToString()); + Assert.Equal(8u, (uint)hashEntries[9].Value); + + Assert.Equal("NullableLongData", hashEntries[10].Name.ToString()); + Assert.Equal(9, 
(long)hashEntries[10].Value); + + Assert.Equal("NullableULongData", hashEntries[11].Name.ToString()); + Assert.Equal(10ul, (ulong)hashEntries[11].Value); + + Assert.Equal("NullableDoubleData", hashEntries[12].Name.ToString()); + Assert.Equal(11.1d, (double)hashEntries[12].Value); + + Assert.Equal("NullableFloatData", hashEntries[13].Name.ToString()); + Assert.Equal(12.2f, (float)hashEntries[13].Value); + + Assert.Equal("NullableBoolData", hashEntries[14].Name.ToString()); + Assert.False((bool)hashEntries[14].Value); + + Assert.Equal("FloatVector", hashEntries[15].Name.ToString()); + Assert.Equal(new float[] { 1, 2, 3, 4 }, MemoryMarshal.Cast((byte[])hashEntries[15].Value!).ToArray()); + + Assert.Equal("DoubleVector", hashEntries[16].Name.ToString()); + Assert.Equal(new double[] { 5, 6, 7, 8 }, MemoryMarshal.Cast((byte[])hashEntries[16].Value!).ToArray()); + } +} diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs index a95179e86346..2f3676059a1c 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -82,15 +82,15 @@ public async Task CanCreateCollectionAsync() 1, "testcollection:", "SCHEMA", - "$.OriginalNameData", + "OriginalNameData", "AS", "OriginalNameData", "TAG", - "$.data_storage_name", + "data_storage_name", "AS", "data_storage_name", "TAG", - "$.vector_storage_name", + "vector_storage_name", "AS", "vector_storage_name", "VECTOR", diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs index fd7a56d8765c..eb337cc4c934 100644 --- 
a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisHashSetVectorStoreRecordMapperTests.cs @@ -1,11 +1,9 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections.Generic; -using System.Runtime.InteropServices; using Microsoft.SemanticKernel.Connectors.Redis; +using Microsoft.SemanticKernel.Connectors.Redis.UnitTests; using Microsoft.SemanticKernel.Data; -using StackExchange.Redis; using Xunit; namespace SemanticKernel.Connectors.Redis.UnitTests; @@ -19,7 +17,8 @@ public sealed class RedisHashSetVectorStoreRecordMapperTests public void MapsAllFieldsFromDataToStorageModel() { // Arrange. - var sut = new RedisHashSetVectorStoreRecordMapper(s_vectorStoreRecordDefinition, s_storagePropertyNames); + var reader = new VectorStoreRecordPropertyReader(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition, null); + var sut = new RedisHashSetVectorStoreRecordMapper(reader); // Act. var actual = sut.MapFromDataToStorageModel(CreateModel("test key")); @@ -27,67 +26,18 @@ public void MapsAllFieldsFromDataToStorageModel() // Assert. 
Assert.NotNull(actual.HashEntries); Assert.Equal("test key", actual.Key); - - Assert.Equal("storage_string_data", actual.HashEntries[0].Name.ToString()); - Assert.Equal("data 1", actual.HashEntries[0].Value.ToString()); - - Assert.Equal("IntData", actual.HashEntries[1].Name.ToString()); - Assert.Equal(1, (int)actual.HashEntries[1].Value); - - Assert.Equal("UIntData", actual.HashEntries[2].Name.ToString()); - Assert.Equal(2u, (uint)actual.HashEntries[2].Value); - - Assert.Equal("LongData", actual.HashEntries[3].Name.ToString()); - Assert.Equal(3, (long)actual.HashEntries[3].Value); - - Assert.Equal("ULongData", actual.HashEntries[4].Name.ToString()); - Assert.Equal(4ul, (ulong)actual.HashEntries[4].Value); - - Assert.Equal("DoubleData", actual.HashEntries[5].Name.ToString()); - Assert.Equal(5.5d, (double)actual.HashEntries[5].Value); - - Assert.Equal("FloatData", actual.HashEntries[6].Name.ToString()); - Assert.Equal(6.6f, (float)actual.HashEntries[6].Value); - - Assert.Equal("BoolData", actual.HashEntries[7].Name.ToString()); - Assert.True((bool)actual.HashEntries[7].Value); - - Assert.Equal("NullableIntData", actual.HashEntries[8].Name.ToString()); - Assert.Equal(7, (int)actual.HashEntries[8].Value); - - Assert.Equal("NullableUIntData", actual.HashEntries[9].Name.ToString()); - Assert.Equal(8u, (uint)actual.HashEntries[9].Value); - - Assert.Equal("NullableLongData", actual.HashEntries[10].Name.ToString()); - Assert.Equal(9, (long)actual.HashEntries[10].Value); - - Assert.Equal("NullableULongData", actual.HashEntries[11].Name.ToString()); - Assert.Equal(10ul, (ulong)actual.HashEntries[11].Value); - - Assert.Equal("NullableDoubleData", actual.HashEntries[12].Name.ToString()); - Assert.Equal(11.1d, (double)actual.HashEntries[12].Value); - - Assert.Equal("NullableFloatData", actual.HashEntries[13].Name.ToString()); - Assert.Equal(12.2f, (float)actual.HashEntries[13].Value); - - Assert.Equal("NullableBoolData", actual.HashEntries[14].Name.ToString()); - 
Assert.False((bool)actual.HashEntries[14].Value); - - Assert.Equal("FloatVector", actual.HashEntries[15].Name.ToString()); - Assert.Equal(new float[] { 1, 2, 3, 4 }, MemoryMarshal.Cast((byte[])actual.HashEntries[15].Value!).ToArray()); - - Assert.Equal("DoubleVector", actual.HashEntries[16].Name.ToString()); - Assert.Equal(new double[] { 5, 6, 7, 8 }, MemoryMarshal.Cast((byte[])actual.HashEntries[16].Value!).ToArray()); + RedisHashSetVectorStoreMappingTestHelpers.VerifyHashSet(actual.HashEntries); } [Fact] public void MapsAllFieldsFromStorageToDataModel() { // Arrange. - var sut = new RedisHashSetVectorStoreRecordMapper(s_vectorStoreRecordDefinition, s_storagePropertyNames); + var reader = new VectorStoreRecordPropertyReader(typeof(AllTypesModel), RedisHashSetVectorStoreMappingTestHelpers.s_vectorStoreRecordDefinition, null); + var sut = new RedisHashSetVectorStoreRecordMapper(reader); // Act. - var actual = sut.MapFromStorageToDataModel(("test key", CreateHashSet()), new() { IncludeVectors = true }); + var actual = sut.MapFromStorageToDataModel(("test key", RedisHashSetVectorStoreMappingTestHelpers.CreateHashSet()), new() { IncludeVectors = true }); // Assert. 
Assert.NotNull(actual); @@ -138,81 +88,12 @@ private static AllTypesModel CreateModel(string key) }; } - private static HashEntry[] CreateHashSet() - { - var hashSet = new HashEntry[17]; - hashSet[0] = new HashEntry("storage_string_data", "data 1"); - hashSet[1] = new HashEntry("IntData", 1); - hashSet[2] = new HashEntry("UIntData", 2); - hashSet[3] = new HashEntry("LongData", 3); - hashSet[4] = new HashEntry("ULongData", 4); - hashSet[5] = new HashEntry("DoubleData", 5.5); - hashSet[6] = new HashEntry("FloatData", 6.6); - hashSet[7] = new HashEntry("BoolData", true); - hashSet[8] = new HashEntry("NullableIntData", 7); - hashSet[9] = new HashEntry("NullableUIntData", 8); - hashSet[10] = new HashEntry("NullableLongData", 9); - hashSet[11] = new HashEntry("NullableULongData", 10); - hashSet[12] = new HashEntry("NullableDoubleData", 11.1); - hashSet[13] = new HashEntry("NullableFloatData", 12.2); - hashSet[14] = new HashEntry("NullableBoolData", false); - hashSet[15] = new HashEntry("FloatVector", MemoryMarshal.AsBytes(new ReadOnlySpan(new float[] { 1, 2, 3, 4 })).ToArray()); - hashSet[16] = new HashEntry("DoubleVector", MemoryMarshal.AsBytes(new ReadOnlySpan(new double[] { 5, 6, 7, 8 })).ToArray()); - return hashSet; - } - - private static readonly Dictionary s_storagePropertyNames = new() - { - ["StringData"] = "storage_string_data", - ["IntData"] = "IntData", - ["UIntData"] = "UIntData", - ["LongData"] = "LongData", - ["ULongData"] = "ULongData", - ["DoubleData"] = "DoubleData", - ["FloatData"] = "FloatData", - ["BoolData"] = "BoolData", - ["NullableIntData"] = "NullableIntData", - ["NullableUIntData"] = "NullableUIntData", - ["NullableLongData"] = "NullableLongData", - ["NullableULongData"] = "NullableULongData", - ["NullableDoubleData"] = "NullableDoubleData", - ["NullableFloatData"] = "NullableFloatData", - ["NullableBoolData"] = "NullableBoolData", - ["FloatVector"] = "FloatVector", - ["DoubleVector"] = "DoubleVector", - }; - - private static readonly 
VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() - { - Properties = new List() - { - new VectorStoreRecordKeyProperty("Key", typeof(string)), - new VectorStoreRecordDataProperty("StringData", typeof(string)), - new VectorStoreRecordDataProperty("IntData", typeof(int)), - new VectorStoreRecordDataProperty("UIntData", typeof(uint)), - new VectorStoreRecordDataProperty("LongData", typeof(long)), - new VectorStoreRecordDataProperty("ULongData", typeof(ulong)), - new VectorStoreRecordDataProperty("DoubleData", typeof(double)), - new VectorStoreRecordDataProperty("FloatData", typeof(float)), - new VectorStoreRecordDataProperty("BoolData", typeof(bool)), - new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), - new VectorStoreRecordDataProperty("NullableUIntData", typeof(uint?)), - new VectorStoreRecordDataProperty("NullableLongData", typeof(long?)), - new VectorStoreRecordDataProperty("NullableULongData", typeof(ulong?)), - new VectorStoreRecordDataProperty("NullableDoubleData", typeof(double?)), - new VectorStoreRecordDataProperty("NullableFloatData", typeof(float?)), - new VectorStoreRecordDataProperty("NullableBoolData", typeof(bool?)), - new VectorStoreRecordVectorProperty("FloatVector", typeof(float)), - new VectorStoreRecordVectorProperty("DoubleVector", typeof(double)), - } - }; - private sealed class AllTypesModel { [VectorStoreRecordKey] public string Key { get; set; } = string.Empty; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "storage_string_data")] public string StringData { get; set; } = string.Empty; [VectorStoreRecordData] diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..724a5cec9e63 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisJsonGenericDataModelMapperTests.cs @@ -0,0 +1,187 @@ +๏ปฟ// 
Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; + +/// +/// Contains tests for the class. +/// +public class RedisJsonGenericDataModelMapperTests +{ + private static readonly float[] s_floatVector = new float[] { 1.0f, 2.0f, 3.0f, 4.0f }; + + private static readonly VectorStoreRecordDefinition s_vectorStoreRecordDefinition = new() + { + Properties = new List() + { + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("StringData", typeof(string)) { StoragePropertyName = "storage_string_data" }, + new VectorStoreRecordDataProperty("IntData", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntData", typeof(int?)), + new VectorStoreRecordDataProperty("ComplexObjectData", typeof(ComplexObject)), + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + } + }; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange. + var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringData"] = "data 1", + ["IntData"] = 1, + ["NullableIntData"] = 2, + ["ComplexObjectData"] = new ComplexObject { Prop1 = "prop 1", Prop2 = "prop 2" }, + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + }, + }; + + // Act. 
+ var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + Assert.Equal("data 1", (string)storageModel.Node["storage_string_data"]!); + Assert.Equal(1, (int)storageModel.Node["IntData"]!); + Assert.Equal(2, (int?)storageModel.Node["NullableIntData"]!); + Assert.Equal("prop 1", (string)storageModel.Node["ComplexObjectData"]!.AsObject()["Prop1"]!); + Assert.Equal(new float[] { 1, 2, 3, 4 }, storageModel.Node["FloatVector"]?.AsArray().GetValues().ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange. + var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = + { + ["StringData"] = null, + ["IntData"] = null, + ["NullableIntData"] = null, + ["ComplexObjectData"] = null, + }, + Vectors = + { + ["FloatVector"] = null, + }, + }; + + // Act. + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + Assert.Null(storageModel.Node["storage_string_data"]); + Assert.Null(storageModel.Node["IntData"]); + Assert.Null(storageModel.Node["NullableIntData"]); + Assert.Null(storageModel.Node["ComplexObjectData"]); + Assert.Null(storageModel.Node["FloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange. 
+ var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + var storageModel = new JsonObject(); + storageModel.Add("storage_string_data", "data 1"); + storageModel.Add("IntData", 1); + storageModel.Add("NullableIntData", 2); + storageModel.Add("ComplexObjectData", new JsonObject(new KeyValuePair[] { new("Prop1", JsonValue.Create("prop 1")), new("Prop2", JsonValue.Create("prop 2")) })); + storageModel.Add("FloatVector", new JsonArray(new[] { 1, 2, 3, 4 }.Select(x => JsonValue.Create(x)).ToArray())); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); + + // Assert. + Assert.Equal("key", dataModel.Key); + Assert.Equal("data 1", dataModel.Data["StringData"]); + Assert.Equal(1, dataModel.Data["IntData"]); + Assert.Equal(2, dataModel.Data["NullableIntData"]); + Assert.Equal("prop 1", ((ComplexObject)dataModel.Data["ComplexObjectData"]!).Prop1); + Assert.Equal(new float[] { 1, 2, 3, 4 }, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange. + var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + var storageModel = new JsonObject(); + storageModel.Add("storage_string_data", null); + storageModel.Add("IntData", null); + storageModel.Add("NullableIntData", null); + storageModel.Add("ComplexObjectData", null); + storageModel.Add("FloatVector", null); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); + + // Assert. 
+ Assert.Equal("key", dataModel.Key); + Assert.Null(dataModel.Data["StringData"]); + Assert.Null(dataModel.Data["IntData"]); + Assert.Null(dataModel.Data["NullableIntData"]); + Assert.Null(dataModel.Data["ComplexObjectData"]); + Assert.Null(dataModel.Vectors["FloatVector"]); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange. + var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + var dataModel = new VectorStoreGenericDataModel("key") + { + Data = { }, + Vectors = { }, + }; + + // Act. + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal("key", storageModel.Key); + Assert.Empty(storageModel.Node.AsObject()); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange. + var storageModel = new JsonObject(); + + var sut = new RedisJsonGenericDataModelMapper(s_vectorStoreRecordDefinition.Properties, JsonSerializerOptions.Default); + + // Act. + var dataModel = sut.MapFromStorageToDataModel(("key", storageModel), new() { IncludeVectors = true }); + + // Assert. 
+ Assert.Equal("key", dataModel.Key); + Assert.Empty(dataModel.Data); + Assert.Empty(dataModel.Vectors); + } + + private sealed class ComplexObject + { + public string Prop1 { get; set; } = string.Empty; + + public string Prop2 { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs index c5bb3b12b2c5..b7c537103858 100644 --- a/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs +++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisVectorStoreCollectionCreateMappingTests.cs @@ -14,8 +14,10 @@ namespace Microsoft.SemanticKernel.Connectors.Redis.UnitTests; /// public class RedisVectorStoreCollectionCreateMappingTests { - [Fact] - public void MapToSchemaCreatesSchema() + [Theory] + [InlineData(true)] + [InlineData(false)] + public void MapToSchemaCreatesSchema(bool useDollarPrefix) { // Arrange. var properties = new VectorStoreRecordProperty[] @@ -50,7 +52,7 @@ public void MapToSchemaCreatesSchema() }; // Act. - var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames); + var schema = RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames, useDollarPrefix); // Assert. 
Assert.NotNull(schema); @@ -65,16 +67,32 @@ public void MapToSchemaCreatesSchema() Assert.IsType(schema.Fields[6]); Assert.IsType(schema.Fields[7]); - VerifyFieldName(schema.Fields[0].FieldName, new List { "$.FilterableString", "AS", "FilterableString" }); - VerifyFieldName(schema.Fields[1].FieldName, new List { "$.FullTextSearchableString", "AS", "FullTextSearchableString" }); - VerifyFieldName(schema.Fields[2].FieldName, new List { "$.FilterableStringEnumerable.*", "AS", "FilterableStringEnumerable" }); - VerifyFieldName(schema.Fields[3].FieldName, new List { "$.FullTextSearchableStringEnumerable", "AS", "FullTextSearchableStringEnumerable" }); + if (useDollarPrefix) + { + VerifyFieldName(schema.Fields[0].FieldName, new List { "$.FilterableString", "AS", "FilterableString" }); + VerifyFieldName(schema.Fields[1].FieldName, new List { "$.FullTextSearchableString", "AS", "FullTextSearchableString" }); + VerifyFieldName(schema.Fields[2].FieldName, new List { "$.FilterableStringEnumerable.*", "AS", "FilterableStringEnumerable" }); + VerifyFieldName(schema.Fields[3].FieldName, new List { "$.FullTextSearchableStringEnumerable", "AS", "FullTextSearchableStringEnumerable" }); + + VerifyFieldName(schema.Fields[4].FieldName, new List { "$.FilterableInt", "AS", "FilterableInt" }); + VerifyFieldName(schema.Fields[5].FieldName, new List { "$.FilterableNullableInt", "AS", "FilterableNullableInt" }); + + VerifyFieldName(schema.Fields[6].FieldName, new List { "$.VectorDefaultIndexingOptions", "AS", "VectorDefaultIndexingOptions" }); + VerifyFieldName(schema.Fields[7].FieldName, new List { "$.vector_specific_indexing_options", "AS", "vector_specific_indexing_options" }); + } + else + { + VerifyFieldName(schema.Fields[0].FieldName, new List { "FilterableString", "AS", "FilterableString" }); + VerifyFieldName(schema.Fields[1].FieldName, new List { "FullTextSearchableString", "AS", "FullTextSearchableString" }); + VerifyFieldName(schema.Fields[2].FieldName, new List { 
"FilterableStringEnumerable.*", "AS", "FilterableStringEnumerable" }); + VerifyFieldName(schema.Fields[3].FieldName, new List { "FullTextSearchableStringEnumerable", "AS", "FullTextSearchableStringEnumerable" }); - VerifyFieldName(schema.Fields[4].FieldName, new List { "$.FilterableInt", "AS", "FilterableInt" }); - VerifyFieldName(schema.Fields[5].FieldName, new List { "$.FilterableNullableInt", "AS", "FilterableNullableInt" }); + VerifyFieldName(schema.Fields[4].FieldName, new List { "FilterableInt", "AS", "FilterableInt" }); + VerifyFieldName(schema.Fields[5].FieldName, new List { "FilterableNullableInt", "AS", "FilterableNullableInt" }); - VerifyFieldName(schema.Fields[6].FieldName, new List { "$.VectorDefaultIndexingOptions", "AS", "VectorDefaultIndexingOptions" }); - VerifyFieldName(schema.Fields[7].FieldName, new List { "$.vector_specific_indexing_options", "AS", "vector_specific_indexing_options" }); + VerifyFieldName(schema.Fields[6].FieldName, new List { "VectorDefaultIndexingOptions", "AS", "VectorDefaultIndexingOptions" }); + VerifyFieldName(schema.Fields[7].FieldName, new List { "vector_specific_indexing_options", "AS", "vector_specific_indexing_options" }); + } Assert.Equal("10", ((VectorField)schema.Fields[6]).Attributes!["DIM"]); Assert.Equal("FLOAT32", ((VectorField)schema.Fields[6]).Attributes!["TYPE"]); @@ -95,7 +113,7 @@ public void MapToSchemaThrowsOnInvalidVectorDimensions(int? dimensions) var storagePropertyNames = new Dictionary() { { "VectorProperty", "VectorProperty" } }; // Act and assert. 
- Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames)); + Assert.Throws(() => RedisVectorStoreCollectionCreateMapping.MapToSchema(properties, storagePropertyNames, true)); } [Fact] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj index a4b7bd6ace44..17ac2e2510a9 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj +++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj @@ -12,9 +12,9 @@ - - - + + + runtime; build; native; contentfiles; analyzers; buildtransitive all @@ -29,31 +29,23 @@ - - + + + - - - - - - - - - - - - - - - - - Always - + + + + + + + + + + + diff --git a/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs b/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs deleted file mode 100644 index d7e81f129c9c..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/MultipleHttpMessageHandlerStub.cs +++ /dev/null @@ -1,53 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Threading; -using System.Threading.Tasks; - -namespace SemanticKernel.Connectors.UnitTests; - -internal sealed class MultipleHttpMessageHandlerStub : DelegatingHandler -{ - private int _callIteration = 0; - - public List RequestHeaders { get; private set; } - - public List ContentHeaders { get; private set; } - - public List RequestContents { get; private set; } - - public List RequestUris { get; private set; } - - public List Methods { get; private set; } - - public List ResponsesToReturn { get; set; } - - public MultipleHttpMessageHandlerStub() - { - this.RequestHeaders = []; - this.ContentHeaders = []; - this.RequestContents = []; - this.RequestUris = []; - this.Methods = []; - this.ResponsesToReturn = []; - } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - this._callIteration++; - - this.Methods.Add(request.Method); - this.RequestUris.Add(request.RequestUri); - this.RequestHeaders.Add(request.Headers); - this.ContentHeaders.Add(request.Content?.Headers); - - var content = request.Content is null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken); - - this.RequestContents.Add(content); - - return await Task.FromResult(this.ResponsesToReturn[this._callIteration - 1]); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs deleted file mode 100644 index 39bc2803fe19..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AIServicesOpenAIExtensionsTests.cs +++ /dev/null @@ -1,88 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.TextGeneration; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests of . -/// -public class AIServicesOpenAIExtensionsTests -{ - [Fact] - public void ItSucceedsWhenAddingDifferentServiceTypeWithSameId() - { - Kernel targetKernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration("depl", "https://url", "key", "azure") - .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url", "key", "azure") - .Build(); - - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("azure")); - } - - [Fact] - public void ItTellsIfAServiceIsAvailable() - { - Kernel targetKernel = Kernel.CreateBuilder() - .AddAzureOpenAITextGeneration("depl", "https://url", "key", serviceId: "azure") - .AddOpenAITextGeneration("model", "apikey", serviceId: "oai") - .AddAzureOpenAITextEmbeddingGeneration("depl2", "https://url2", "key", serviceId: "azure") - .AddOpenAITextEmbeddingGeneration("model2", "apikey2", serviceId: "oai2") - .Build(); - - // Assert - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("oai")); - Assert.NotNull(targetKernel.GetRequiredService("azure")); - Assert.NotNull(targetKernel.GetRequiredService("oai")); - } - - [Fact] - public void ItCanOverwriteServices() - { - // Arrange - // Act - Assert no exception occurs - var builder = Kernel.CreateBuilder(); - - builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAITextGeneration("depl", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); - builder.Services.AddOpenAITextGeneration("model", "key", serviceId: "one"); - - 
builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAITextEmbeddingGeneration("dep", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); - builder.Services.AddOpenAITextEmbeddingGeneration("model", "key", serviceId: "one"); - - builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); - builder.Services.AddAzureOpenAIChatCompletion("dep", "https://localhost", "key", serviceId: "one"); - - builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); - builder.Services.AddOpenAIChatCompletion("model", "key", serviceId: "one"); - - builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); - builder.Services.AddOpenAITextToImage("model", "key", serviceId: "one"); - - builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); - builder.Services.AddSingleton(new OpenAITextGenerationService("model", "key")); - - builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); - builder.Services.AddSingleton((_) => new OpenAITextGenerationService("model", "key")); - - builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); - builder.Services.AddKeyedSingleton("one", new OpenAITextGenerationService("model", "key")); - - builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); - builder.Services.AddKeyedSingleton("one", (_, _) => new OpenAITextGenerationService("model", "key")); - - builder.Build(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs deleted file mode 100644 index f3dd1850d56e..000000000000 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataChatMessageContentTests.cs +++ /dev/null @@ -1,120 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections; -using System.Collections.Generic; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAIWithDataChatMessageContentTests -{ - [Fact] - public void ConstructorThrowsExceptionWhenAssistantMessageIsNotProvided() - { - // Arrange - var choice = new ChatWithDataChoice(); - - // Act & Assert - var exception = Assert.Throws(() => new AzureOpenAIWithDataChatMessageContent(choice, "model-id")); - - Assert.Contains("Chat is not valid", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void ConstructorReturnsInstanceWithNullToolContent() - { - // Arrange - var choice = new ChatWithDataChoice { Messages = [new() { Content = "Assistant content", Role = "assistant" }] }; - - // Act - var content = new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.Null(content.ToolContent); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorReturnsInstanceWithNonNullToolContent(bool includeMetadata) - { - // Arrange - var choice = new ChatWithDataChoice - { - Messages = [ - new() { Content = "Assistant content", Role = "assistant" }, - new() { Content = "Tool content", Role = "tool" }] - }; - - // Act - var content = includeMetadata ? 
- new AzureOpenAIWithDataChatMessageContent(choice, "model-id", new Dictionary()) : - new AzureOpenAIWithDataChatMessageContent(choice, "model-id"); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal("Tool content", content.ToolContent); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.NotNull(content.Metadata); - Assert.Equal("Tool content", content.Metadata["ToolContent"]); - } - - [Fact] - public void ConstructorCloneReadOnlyMetadataDictionary() - { - // Arrange - var choice = new ChatWithDataChoice - { - Messages = [new() { Content = "Assistant content", Role = "assistant" }] - }; - - var metadata = new ReadOnlyInternalDictionary(new Dictionary() { ["Extra"] = "Data" }); - - // Act - var content = new AzureOpenAIWithDataChatMessageContent(choice, "model-id", metadata); - - // Assert - Assert.Equal("Assistant content", content.Content); - Assert.Equal(AuthorRole.Assistant, content.Role); - - Assert.NotNull(content.Metadata); - Assert.Equal("Data", content.Metadata["Extra"]); - } - - private sealed class ReadOnlyInternalDictionary : IReadOnlyDictionary - { - public ReadOnlyInternalDictionary(IDictionary initializingData) - { - this._internalDictionary = new Dictionary(initializingData); - } - private readonly Dictionary _internalDictionary; - - public object? this[string key] => this._internalDictionary[key]; - - public IEnumerable Keys => this._internalDictionary.Keys; - - public IEnumerable Values => this._internalDictionary.Values; - - public int Count => this._internalDictionary.Count; - - public bool ContainsKey(string key) => this._internalDictionary.ContainsKey(key); - - public IEnumerator> GetEnumerator() => this._internalDictionary.GetEnumerator(); - - public bool TryGetValue(string key, out object? 
value) => this._internalDictionary.TryGetValue(key, out value); - - IEnumerator IEnumerable.GetEnumerator() => this._internalDictionary.GetEnumerator(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs deleted file mode 100644 index 45597c616270..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/AzureOpenAIWithDataStreamingChatMessageContentTests.cs +++ /dev/null @@ -1,61 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAIWithDataStreamingChatMessageContentTests -{ - [Theory] - [MemberData(nameof(ValidChoices))] - public void ConstructorWithValidChoiceSetsNonEmptyContent(object choice, string expectedContent) - { - // Arrange - var streamingChoice = choice as ChatWithDataStreamingChoice; - - // Act - var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); - - // Assert - Assert.Equal(expectedContent, content.Content); - } - - [Theory] - [MemberData(nameof(InvalidChoices))] - public void ConstructorWithInvalidChoiceSetsNullContent(object choice) - { - // Arrange - var streamingChoice = choice as ChatWithDataStreamingChoice; - - // Act - var content = new AzureOpenAIWithDataStreamingChatMessageContent(streamingChoice!, 0, "model-id"); - - // Assert - Assert.Null(content.Content); - } - - public static IEnumerable ValidChoices - { - get - { - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 1" } }] }, "Content 1" }; - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content 2", Role = "Assistant" } }] }, "Content 2" }; - } - } - - public static IEnumerable InvalidChoices - { - get - { - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { EndTurn = true }] } }; - yield return new object[] { new ChatWithDataStreamingChoice { Messages = [new() { Delta = new() { Content = "Content", Role = "tool" } }] } }; - } - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs deleted file mode 100644 index fd0a830cc2d9..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/OpenAIStreamingTextContentTests.cs +++ /dev/null @@ -1,42 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Text; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -/// -/// Unit tests for class. -/// -public sealed class OpenAIStreamingTextContentTests -{ - [Fact] - public void ToByteArrayWorksCorrectly() - { - // Arrange - var expectedBytes = Encoding.UTF8.GetBytes("content"); - var content = new OpenAIStreamingTextContent("content", 0, "model-id"); - - // Act - var actualBytes = content.ToByteArray(); - - // Assert - Assert.Equal(expectedBytes, actualBytes); - } - - [Theory] - [InlineData(null, "")] - [InlineData("content", "content")] - public void ToStringWorksCorrectly(string? content, string expectedString) - { - // Arrange - var textContent = new OpenAIStreamingTextContent(content!, 0, "model-id"); - - // Act - var actualString = textContent.ToString(); - - // Assert - Assert.Equal(expectedString, actualString); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs deleted file mode 100644 index 54a183eca330..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AzureSdk/RequestFailedExceptionExtensionsTests.cs +++ /dev/null @@ -1,78 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using Azure; -using Azure.Core; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.AzureSdk; - -/// -/// Unit tests for class. -/// -public sealed class RequestFailedExceptionExtensionsTests -{ - [Theory] - [InlineData(0, null)] - [InlineData(500, HttpStatusCode.InternalServerError)] - public void ToHttpOperationExceptionWithStatusReturnsValidException(int responseStatus, HttpStatusCode? 
httpStatusCode) - { - // Arrange - var exception = new RequestFailedException(responseStatus, "Error Message"); - - // Act - var actualException = exception.ToHttpOperationException(); - - // Assert - Assert.IsType(actualException); - Assert.Equal(httpStatusCode, actualException.StatusCode); - Assert.Equal("Error Message", actualException.Message); - Assert.Same(exception, actualException.InnerException); - } - - [Fact] - public void ToHttpOperationExceptionWithContentReturnsValidException() - { - // Arrange - using var response = new FakeResponse("Response Content", 500); - var exception = new RequestFailedException(response); - - // Act - var actualException = exception.ToHttpOperationException(); - - // Assert - Assert.IsType(actualException); - Assert.Equal(HttpStatusCode.InternalServerError, actualException.StatusCode); - Assert.Equal("Response Content", actualException.ResponseContent); - Assert.Same(exception, actualException.InnerException); - } - - #region private - - private sealed class FakeResponse(string responseContent, int status) : Response - { - private readonly string _responseContent = responseContent; - private readonly IEnumerable _headers = []; - - public override BinaryData Content => BinaryData.FromString(this._responseContent); - public override int Status { get; } = status; - public override string ReasonPhrase => "Reason Phrase"; - public override Stream? ContentStream { get => null; set => throw new NotImplementedException(); } - public override string ClientRequestId { get => "Client Request Id"; set => throw new NotImplementedException(); } - - public override void Dispose() { } - protected override bool ContainsHeader(string name) => throw new NotImplementedException(); - protected override IEnumerable EnumerateHeaders() => this._headers; -#pragma warning disable CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). 
- protected override bool TryGetHeader(string name, out string? value) => throw new NotImplementedException(); - protected override bool TryGetHeaderValues(string name, out IEnumerable? values) => throw new NotImplementedException(); -#pragma warning restore CS8765 // Nullability of type of parameter doesn't match overridden member (possibly because of nullability attributes). - } - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs deleted file mode 100644 index 7d1c47388f91..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/OpenAIChatCompletionServiceTests.cs +++ /dev/null @@ -1,687 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.TextGeneration; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion; - -/// -/// Unit tests for -/// -public sealed class OpenAIChatCompletionServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly OpenAIFunction _timepluginDate, _timepluginNow; - private readonly OpenAIPromptExecutionSettings _executionSettings; - private readonly Mock _mockLoggerFactory; - - public OpenAIChatCompletionServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - 
this._mockLoggerFactory = new Mock(); - - IList functions = KernelPluginFactory.CreateFromFunctions("TimePlugin", new[] - { - KernelFunctionFactory.CreateFromMethod((string? format = null) => DateTime.Now.Date.ToString(format, CultureInfo.InvariantCulture), "Date", "TimePlugin.Date"), - KernelFunctionFactory.CreateFromMethod((string? format = null) => DateTime.Now.ToString(format, CultureInfo.InvariantCulture), "Now", "TimePlugin.Now"), - }).GetFunctionsMetadata(); - - this._timepluginDate = functions[0].ToOpenAIFunction(); - this._timepluginNow = functions[1].ToOpenAIFunction(); - - this._executionSettings = new() - { - ToolCallBehavior = ToolCallBehavior.EnableFunctions([this._timepluginDate, this._timepluginNow]) - }; - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new OpenAIChatCompletionService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAIChatCompletionService("model-id", "api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData("http://localhost:1234/chat/completions", "http://localhost:1234/chat/completions")] // Uses full path when provided - [InlineData("http://localhost:1234/v2/chat/completions", "http://localhost:1234/v2/chat/completions")] // Uses full path when provided - [InlineData("http://localhost:1234", "http://localhost:1234/v1/chat/completions")] - [InlineData("http://localhost:8080", "http://localhost:8080/v1/chat/completions")] - [InlineData("https://something:8080", "https://something:8080/v1/chat/completions")] // Accepts TLS Secured endpoints - public async Task ItUsesCustomEndpointsWhenProvidedAsync(string endpointProvided, string expectedEndpoint) - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "any", 
apiKey: null, httpClient: this._httpClient, endpoint: new Uri(endpointProvided)); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - Assert.Equal(expectedEndpoint, this._messageHandlerStub.RequestUri!.ToString()); - } - - [Fact] - public async Task ItUsesHttpClientEndpointIfProvidedEndpointIsMissingAsync() - { - // Arrange - this._httpClient.BaseAddress = new Uri("http://localhost:12312"); - var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: null, httpClient: this._httpClient, endpoint: null!); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - Assert.Equal("http://localhost:12312/v1/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); - } - - [Fact] - public async Task ItUsesDefaultEndpointIfProvidedEndpointIsMissingAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "any", apiKey: "abc", httpClient: this._httpClient, endpoint: null!); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - Assert.Equal("https://api.openai.com/v1/chat/completions", this._messageHandlerStub.RequestUri!.ToString()); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new OpenAIChatCompletionService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : - new OpenAIChatCompletionService("model-id", client); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task ItCreatesCorrectFunctionToolCallsWhenUsingAutoAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - Assert.Equal(2, optionsJson.GetProperty("tools").GetArrayLength()); - Assert.Equal("TimePlugin-Date", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); - Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[1].GetProperty("function").GetProperty("name").GetString()); - } - - [Fact] - public async Task ItCreatesCorrectFunctionToolCallsWhenUsingNowAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - this._executionSettings.ToolCallBehavior = ToolCallBehavior.RequireFunction(this._timepluginNow); - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - 
Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - Assert.Equal(1, optionsJson.GetProperty("tools").GetArrayLength()); - Assert.Equal("TimePlugin-Now", optionsJson.GetProperty("tools")[0].GetProperty("function").GetProperty("name").GetString()); - } - - [Fact] - public async Task ItCreatesNoFunctionsWhenUsingNoneAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - this._executionSettings.ToolCallBehavior = null; - - // Act - await chatCompletion.GetChatMessageContentsAsync([], this._executionSettings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - Assert.False(optionsJson.TryGetProperty("functions", out var _)); - } - - [Fact] - public async Task ItAddsIdToChatMessageAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - var chatHistory = new ChatHistory(); - chatHistory.AddMessage(AuthorRole.Tool, "Hello", metadata: new Dictionary() { { OpenAIChatMessageContent.ToolIdProperty, "John Doe" } }); - - // Act - await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - 
Assert.Equal(1, optionsJson.GetProperty("messages").GetArrayLength()); - Assert.Equal("John Doe", optionsJson.GetProperty("messages")[0].GetProperty("tool_call_id").GetString()); - } - - [Fact] - public async Task ItGetChatMessageContentsShouldHaveModelIdDefinedAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; - - var chatHistory = new ChatHistory(); - chatHistory.AddMessage(AuthorRole.User, "Hello"); - - // Act - var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, this._executionSettings); - - // Assert - Assert.NotNull(chatMessage.ModelId); - Assert.Equal("gpt-3.5-turbo", chatMessage.ModelId); - } - - [Fact] - public async Task ItGetTextContentsShouldHaveModelIdDefinedAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(AzureChatCompletionResponse, Encoding.UTF8, "application/json") }; - - var chatHistory = new ChatHistory(); - chatHistory.AddMessage(AuthorRole.User, "Hello"); - - // Act - var textContent = await chatCompletion.GetTextContentAsync("hello", this._executionSettings); - - // Assert - Assert.NotNull(textContent.ModelId); - Assert.Equal("gpt-3.5-turbo", textContent.ModelId); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); - using var stream = new 
MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - var enumerator = service.GetStreamingTextContentsAsync("Prompt").GetAsyncEnumerator(); - - await enumerator.MoveNextAsync(); - Assert.Equal("Test chat streaming response", enumerator.Current.Text); - - await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); - } - - [Fact] - public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAIChatCompletionService("model-id", "api-key", "organization", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - var enumerator = service.GetStreamingChatMessageContentsAsync([]).GetAsyncEnumerator(); - - await enumerator.MoveNextAsync(); - Assert.Equal("Test chat streaming response", enumerator.Current.Content); - - await enumerator.MoveNextAsync(); - Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]); - } - - [Fact] - public async Task ItAddsSystemMessageAsync() - { - // Arrange - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - var chatHistory = new ChatHistory(); - chatHistory.AddMessage(AuthorRole.User, "Hello"); - - // Act - await chatCompletion.GetChatMessageContentsAsync(chatHistory, this._executionSettings); - - // Assert - var 
actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - - var messages = optionsJson.GetProperty("messages"); - Assert.Equal(1, messages.GetArrayLength()); - - Assert.Equal("Hello", messages[0].GetProperty("content").GetString()); - Assert.Equal("user", messages[0].GetProperty("role").GetString()); - } - - [Fact] - public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync() - { - // Arrange - const string Prompt = "This is test prompt"; - const string SystemMessage = "This is test system message"; - const string AssistantMessage = "This is assistant message"; - const string CollectionItemPrompt = "This is collection item prompt"; - - var chatCompletion = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage }; - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { Content = new StringContent(ChatCompletionResponse) }; - - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage(Prompt); - chatHistory.AddAssistantMessage(AssistantMessage); - chatHistory.AddUserMessage( - [ - new TextContent(CollectionItemPrompt), - new ImageContent(new Uri("https://image")) - ]); - - // Act - await chatCompletion.GetChatMessageContentsAsync(chatHistory, settings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - - var messages = optionsJson.GetProperty("messages"); - - Assert.Equal(4, messages.GetArrayLength()); - - Assert.Equal(SystemMessage, messages[0].GetProperty("content").GetString()); - Assert.Equal("system", 
messages[0].GetProperty("role").GetString()); - - Assert.Equal(Prompt, messages[1].GetProperty("content").GetString()); - Assert.Equal("user", messages[1].GetProperty("role").GetString()); - - Assert.Equal(AssistantMessage, messages[2].GetProperty("content").GetString()); - Assert.Equal("assistant", messages[2].GetProperty("role").GetString()); - - var contentItems = messages[3].GetProperty("content"); - Assert.Equal(2, contentItems.GetArrayLength()); - Assert.Equal(CollectionItemPrompt, contentItems[0].GetProperty("text").GetString()); - Assert.Equal("text", contentItems[0].GetProperty("type").GetString()); - Assert.Equal("https://image/", contentItems[1].GetProperty("image_url").GetProperty("url").GetString()); - Assert.Equal("image_url", contentItems[1].GetProperty("type").GetString()); - } - - [Fact] - public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync() - { - // Arrange - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) - }; - - var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - - var chatHistory = new ChatHistory(); - chatHistory.AddUserMessage("Fake prompt"); - - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Act - var result = await sut.GetChatMessageContentAsync(chatHistory, settings); - - // Assert - Assert.NotNull(result); - Assert.Equal(5, result.Items.Count); - - var getCurrentWeatherFunctionCall = result.Items[0] as FunctionCallContent; - Assert.NotNull(getCurrentWeatherFunctionCall); - Assert.Equal("GetCurrentWeather", getCurrentWeatherFunctionCall.FunctionName); - Assert.Equal("MyPlugin", getCurrentWeatherFunctionCall.PluginName); - Assert.Equal("1", 
getCurrentWeatherFunctionCall.Id); - Assert.Equal("Boston, MA", getCurrentWeatherFunctionCall.Arguments?["location"]?.ToString()); - - var functionWithExceptionFunctionCall = result.Items[1] as FunctionCallContent; - Assert.NotNull(functionWithExceptionFunctionCall); - Assert.Equal("FunctionWithException", functionWithExceptionFunctionCall.FunctionName); - Assert.Equal("MyPlugin", functionWithExceptionFunctionCall.PluginName); - Assert.Equal("2", functionWithExceptionFunctionCall.Id); - Assert.Equal("value", functionWithExceptionFunctionCall.Arguments?["argument"]?.ToString()); - - var nonExistentFunctionCall = result.Items[2] as FunctionCallContent; - Assert.NotNull(nonExistentFunctionCall); - Assert.Equal("NonExistentFunction", nonExistentFunctionCall.FunctionName); - Assert.Equal("MyPlugin", nonExistentFunctionCall.PluginName); - Assert.Equal("3", nonExistentFunctionCall.Id); - Assert.Equal("value", nonExistentFunctionCall.Arguments?["argument"]?.ToString()); - - var invalidArgumentsFunctionCall = result.Items[3] as FunctionCallContent; - Assert.NotNull(invalidArgumentsFunctionCall); - Assert.Equal("InvalidArguments", invalidArgumentsFunctionCall.FunctionName); - Assert.Equal("MyPlugin", invalidArgumentsFunctionCall.PluginName); - Assert.Equal("4", invalidArgumentsFunctionCall.Id); - Assert.Null(invalidArgumentsFunctionCall.Arguments); - Assert.NotNull(invalidArgumentsFunctionCall.Exception); - Assert.Equal("Error: Function call arguments were invalid JSON.", invalidArgumentsFunctionCall.Exception.Message); - Assert.NotNull(invalidArgumentsFunctionCall.Exception.InnerException); - - var intArgumentsFunctionCall = result.Items[4] as FunctionCallContent; - Assert.NotNull(intArgumentsFunctionCall); - Assert.Equal("IntArguments", intArgumentsFunctionCall.FunctionName); - Assert.Equal("MyPlugin", intArgumentsFunctionCall.PluginName); - Assert.Equal("5", intArgumentsFunctionCall.Id); - Assert.Equal("36", intArgumentsFunctionCall.Arguments?["age"]?.ToString()); - } - - 
[Fact] - public async Task FunctionCallsShouldBeReturnedToLLMAsync() - { - // Arrange - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(ChatCompletionResponse) - }; - - var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - - var items = new ChatMessageContentItemCollection - { - new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), - new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }) - }; - - var chatHistory = new ChatHistory - { - new ChatMessageContent(AuthorRole.Assistant, items) - }; - - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Act - await sut.GetChatMessageContentAsync(chatHistory, settings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - - var messages = optionsJson.GetProperty("messages"); - Assert.Equal(1, messages.GetArrayLength()); - - var assistantMessage = messages[0]; - Assert.Equal("assistant", assistantMessage.GetProperty("role").GetString()); - - Assert.Equal(2, assistantMessage.GetProperty("tool_calls").GetArrayLength()); - - var tool1 = assistantMessage.GetProperty("tool_calls")[0]; - Assert.Equal("1", tool1.GetProperty("id").GetString()); - Assert.Equal("function", tool1.GetProperty("type").GetString()); - - var function1 = tool1.GetProperty("function"); - Assert.Equal("MyPlugin-GetCurrentWeather", function1.GetProperty("name").GetString()); - Assert.Equal("{\"location\":\"Boston, MA\"}", function1.GetProperty("arguments").GetString()); - - var tool2 = assistantMessage.GetProperty("tool_calls")[1]; - Assert.Equal("2", 
tool2.GetProperty("id").GetString()); - Assert.Equal("function", tool2.GetProperty("type").GetString()); - - var function2 = tool2.GetProperty("function"); - Assert.Equal("MyPlugin-GetWeatherForecast", function2.GetProperty("name").GetString()); - Assert.Equal("{\"location\":\"Boston, MA\"}", function2.GetProperty("arguments").GetString()); - } - - [Fact] - public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync() - { - // Arrange - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(ChatCompletionResponse) - }; - - var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - - var chatHistory = new ChatHistory - { - new ChatMessageContent(AuthorRole.Tool, - [ - new FunctionResultContent(new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"), - ]), - new ChatMessageContent(AuthorRole.Tool, - [ - new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny") - ]) - }; - - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Act - await sut.GetChatMessageContentAsync(chatHistory, settings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - - var messages = optionsJson.GetProperty("messages"); - Assert.Equal(2, messages.GetArrayLength()); - - var assistantMessage = messages[0]; - Assert.Equal("tool", assistantMessage.GetProperty("role").GetString()); - Assert.Equal("rainy", assistantMessage.GetProperty("content").GetString()); - Assert.Equal("1", assistantMessage.GetProperty("tool_call_id").GetString()); - - 
var assistantMessage2 = messages[1]; - Assert.Equal("tool", assistantMessage2.GetProperty("role").GetString()); - Assert.Equal("sunny", assistantMessage2.GetProperty("content").GetString()); - Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); - } - - [Fact] - public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync() - { - // Arrange - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(ChatCompletionResponse) - }; - - var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient); - - var chatHistory = new ChatHistory - { - new ChatMessageContent(AuthorRole.Tool, - [ - new FunctionResultContent(new FunctionCallContent("GetCurrentWeather", "MyPlugin", "1", new KernelArguments() { ["location"] = "Boston, MA" }), "rainy"), - new FunctionResultContent(new FunctionCallContent("GetWeatherForecast", "MyPlugin", "2", new KernelArguments() { ["location"] = "Boston, MA" }), "sunny") - ]) - }; - - var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; - - // Act - await sut.GetChatMessageContentAsync(chatHistory, settings); - - // Assert - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - Assert.NotNull(actualRequestContent); - - var optionsJson = JsonSerializer.Deserialize(actualRequestContent); - - var messages = optionsJson.GetProperty("messages"); - Assert.Equal(2, messages.GetArrayLength()); - - var assistantMessage = messages[0]; - Assert.Equal("tool", assistantMessage.GetProperty("role").GetString()); - Assert.Equal("rainy", assistantMessage.GetProperty("content").GetString()); - Assert.Equal("1", assistantMessage.GetProperty("tool_call_id").GetString()); - - var assistantMessage2 = messages[1]; - Assert.Equal("tool", assistantMessage2.GetProperty("role").GetString()); - 
Assert.Equal("sunny", assistantMessage2.GetProperty("content").GetString()); - Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString()); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - private const string ChatCompletionResponse = """ - { - "id": "chatcmpl-8IlRBQU929ym1EqAY2J4T7GGkW5Om", - "object": "chat.completion", - "created": 1699482945, - "model": "gpt-3.5-turbo", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": null, - "function_call": { - "name": "TimePlugin_Date", - "arguments": "{}" - } - }, - "finish_reason": "stop" - } - ], - "usage": { - "prompt_tokens": 52, - "completion_tokens": 1, - "total_tokens": 53 - } - } - """; - private const string AzureChatCompletionResponse = """ - { - "id": "chatcmpl-8S914omCBNQ0KU1NFtxmupZpzKWv2", - "object": "chat.completion", - "created": 1701718534, - "model": "gpt-3.5-turbo", - "prompt_filter_results": [ - { - "prompt_index": 0, - "content_filter_results": { - "hate": { - "filtered": false, - "severity": "safe" - }, - "self_harm": { - "filtered": false, - "severity": "safe" - }, - "sexual": { - "filtered": false, - "severity": "safe" - }, - "violence": { - "filtered": false, - "severity": "safe" - } - } - } - ], - "choices": [ - { - "index": 0, - "finish_reason": "stop", - "message": { - "role": "assistant", - "content": "Hello! How can I help you today? Please provide me with a question or topic you would like information on." 
- }, - "content_filter_results": { - "hate": { - "filtered": false, - "severity": "safe" - }, - "self_harm": { - "filtered": false, - "severity": "safe" - }, - "sexual": { - "filtered": false, - "severity": "safe" - }, - "violence": { - "filtered": false, - "severity": "safe" - } - } - } - ], - "usage": { - "prompt_tokens": 23, - "completion_tokens": 23, - "total_tokens": 46 - } - } - """; -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs deleted file mode 100644 index 782267039c59..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletionWithData/AzureOpenAIChatCompletionWithDataTests.cs +++ /dev/null @@ -1,201 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletionWithData; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for -/// -public sealed class AzureOpenAIChatCompletionWithDataTests : IDisposable -{ - private readonly AzureOpenAIChatCompletionWithDataConfig _config; - - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAIChatCompletionWithDataTests() - { - this._config = this.GetConfig(); - - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - 
[InlineData(true)] - [InlineData(false)] - public void ConstructorWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient, this._mockLoggerFactory.Object) : - new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - - // Assert - Assert.NotNull(service); - Assert.Equal("fake-completion-model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task SpecifiedConfigurationShouldBeUsedAsync() - { - // Arrange - const string ExpectedUri = "https://fake-completion-endpoint/openai/deployments/fake-completion-model-id/extensions/chat/completions?api-version=fake-api-version"; - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - - // Act - await service.GetChatMessageContentsAsync([]); - - // Assert - var actualUri = this._messageHandlerStub.RequestUri?.AbsoluteUri; - var actualRequestHeaderValues = this._messageHandlerStub.RequestHeaders!.GetValues("Api-Key"); - var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - - Assert.Equal(ExpectedUri, actualUri); - - Assert.Contains("fake-completion-api-key", actualRequestHeaderValues); - Assert.Contains("https://fake-data-source-endpoint", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-api-key", actualRequestContent, StringComparison.OrdinalIgnoreCase); - Assert.Contains("fake-data-source-index", actualRequestContent, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task DefaultApiVersionShouldBeUsedAsync() - { - // Arrange - var config = this.GetConfig(); - config.CompletionApiVersion = string.Empty; - - var service = new AzureOpenAIChatCompletionWithDataService(config, this._httpClient); - - // Act - await service.GetChatMessageContentsAsync([]); - - // Assert - var actualUri = 
this._messageHandlerStub.RequestUri?.AbsoluteUri; - - Assert.Contains("2024-02-01", actualUri, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task GetChatMessageContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) - }; - - // Act - var result = await service.GetChatMessageContentsAsync([]); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat with data response", result[0].Content); - - var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingChatMessageContentsAsync([])) - { - Assert.Equal("Test chat with data streaming response", chunk.Content); - } - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_with_data_test_response.json")) - 
}; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat with data response", result[0].Text); - - var usage = result[0].Metadata?["Usage"] as ChatWithDataUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAIChatCompletionWithDataService(this._config, this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_with_data_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat with data streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - private AzureOpenAIChatCompletionWithDataConfig GetConfig() - { - return new AzureOpenAIChatCompletionWithDataConfig - { - CompletionModelId = "fake-completion-model-id", - CompletionEndpoint = "https://fake-completion-endpoint", - CompletionApiKey = "fake-completion-api-key", - CompletionApiVersion = "fake-api-version", - DataSourceEndpoint = "https://fake-data-source-endpoint", - DataSourceApiKey = "fake-data-source-api-key", - DataSourceIndex = "fake-data-source-index" - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs deleted file mode 100644 index 08bde153aa4a..000000000000 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIMemoryBuilderExtensionsTests.cs +++ /dev/null @@ -1,66 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Azure.Core; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Memory; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Unit tests for class. -/// -public sealed class OpenAIMemoryBuilderExtensionsTests -{ - private readonly Mock _mockMemoryStore = new(); - - [Fact] - public void AzureOpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - - // Act - var memory = builder - .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key", "model-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } - - [Fact] - public void AzureOpenAITextEmbeddingGenerationWithTokenCredentialWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - var memory = builder - .WithAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials, "model-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } - - [Fact] - public void OpenAITextEmbeddingGenerationWithApiKeyWorksCorrectly() - { - // Arrange - var builder = new MemoryBuilder(); - - // Act - var memory = builder - .WithOpenAITextEmbeddingGeneration("model-id", "api-key", "organization-id") - .WithMemoryStore(this._mockMemoryStore.Object) - .Build(); - - // Assert - Assert.NotNull(memory); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs deleted file mode 100644 index 5cc41c3c881e..000000000000 --- 
a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIServiceCollectionExtensionsTests.cs +++ /dev/null @@ -1,746 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.AudioToText; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Embeddings; -using Microsoft.SemanticKernel.TextGeneration; -using Microsoft.SemanticKernel.TextToAudio; -using Microsoft.SemanticKernel.TextToImage; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -#pragma warning disable CS0618 // AzureOpenAIChatCompletionWithData is deprecated in favor of OpenAIPromptExecutionSettings.AzureChatExtensionsOptions - -/// -/// Unit tests for class. -/// -public sealed class OpenAIServiceCollectionExtensionsTests : IDisposable -{ - private readonly HttpClient _httpClient; - - public OpenAIServiceCollectionExtensionsTests() - { - this._httpClient = new HttpClient(); - } - - #region Text generation - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", 
credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextGeneration("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAITextGeneration("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange 
- var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAITextGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAITextGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextGeneration("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAITextGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAITextGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAITextGeneration("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextGenerationService); - } - - #endregion - - #region Text embeddings - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = 
DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider 
=> builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAITextEmbeddingGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAITextEmbeddingGeneration("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextEmbeddingGenerationService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAITextEmbeddingGenerationAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAITextEmbeddingGeneration("model-id", client), - InitializationType.OpenAIClientInServiceProvider => 
builder.Services.AddOpenAITextEmbeddingGeneration("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextEmbeddingGenerationService); - } - - #endregion - - #region Chat completion - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - [InlineData(InitializationType.ChatCompletionWithData)] - public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var config = this.GetCompletionWithDataConfig(); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"), - InitializationType.ChatCompletionWithData => builder.AddAzureOpenAIChatCompletion(config), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - - if (type == InitializationType.ChatCompletionWithData) - { - Assert.True(service is AzureOpenAIChatCompletionWithDataService); - } - else - { - Assert.True(service is AzureOpenAIChatCompletionService); - } - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - 
[InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - [InlineData(InitializationType.ChatCompletionWithData)] - public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var config = this.GetCompletionWithDataConfig(); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"), - InitializationType.ChatCompletionWithData => builder.Services.AddAzureOpenAIChatCompletion(config), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - - if (type == InitializationType.ChatCompletionWithData) - { - Assert.True(service is AzureOpenAIChatCompletionWithDataService); - } - else - { - Assert.True(service is AzureOpenAIChatCompletionService); - } - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientEndpoint)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - 
builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAIChatCompletion("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAIChatCompletion("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIChatCompletion("model-id"), - InitializationType.OpenAIClientEndpoint => builder.AddOpenAIChatCompletion("model-id", new Uri("http://localhost:12345"), "apikey"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIChatCompletionService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientEndpoint)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAIChatCompletionAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => builder.Services.AddOpenAIChatCompletion("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAIChatCompletion("model-id", client), - InitializationType.OpenAIClientEndpoint => builder.Services.AddOpenAIChatCompletion("model-id", new Uri("http://localhost:12345"), "apikey"), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAIChatCompletion("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIChatCompletionService); - } - - #endregion - - #region Text to image - - [Fact] - public void KernelBuilderAddAzureOpenAITextToImageAddsValidServiceWithTokenCredentials() - { - // Arrange - 
var builder = Kernel.CreateBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - builder = builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToImageAddsValidServiceTokenCredentials() - { - // Arrange - var builder = Kernel.CreateBuilder(); - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - - // Act - builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void KernelBuilderAddAzureOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToImageService); - } - - [Fact] - public void KernelBuilderAddOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddOpenAITextToImage("model-id", "api-key"); - - // Assert - var service = 
builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToImageService); - } - - [Fact] - public void ServiceCollectionAddOpenAITextToImageAddsValidServiceWithApiKey() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddOpenAITextToImage("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToImageService); - } - - #endregion - - #region Text to audio - - [Fact] - public void KernelBuilderAddAzureOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToAudioService); - } - - [Fact] - public void ServiceCollectionAddAzureOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAITextToAudioService); - } - - [Fact] - public void KernelBuilderAddOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder = builder.AddOpenAITextToAudio("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAITextToAudioService); - } - - [Fact] - public void ServiceCollectionAddOpenAITextToAudioAddsValidService() - { - // Arrange - var builder = Kernel.CreateBuilder(); - - // Act - builder.Services.AddOpenAITextToAudio("model-id", "api-key"); - - // Assert - var service = builder.Build().GetRequiredService(); - 
- Assert.NotNull(service); - Assert.True(service is OpenAITextToAudioService); - } - - #endregion - - #region Audio to text - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.AddAzureOpenAIAudioToText("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddAzureOpenAIAudioToText("deployment-name"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.TokenCredential)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddAzureOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch - { - InitializationType.ApiKey => 
builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"), - InitializationType.TokenCredential => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials), - InitializationType.OpenAIClientInline => builder.Services.AddAzureOpenAIAudioToText("deployment-name", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddAzureOpenAIAudioToText("deployment-name"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is AzureOpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void KernelBuilderAddOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - builder = type switch - { - InitializationType.ApiKey => builder.AddOpenAIAudioToText("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.AddOpenAIAudioToText("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.AddOpenAIAudioToText("model-id"), - _ => builder - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIAudioToTextService); - } - - [Theory] - [InlineData(InitializationType.ApiKey)] - [InlineData(InitializationType.OpenAIClientInline)] - [InlineData(InitializationType.OpenAIClientInServiceProvider)] - public void ServiceCollectionAddOpenAIAudioToTextAddsValidService(InitializationType type) - { - // Arrange - var client = new OpenAIClient("key"); - var builder = Kernel.CreateBuilder(); - - builder.Services.AddSingleton(client); - - // Act - IServiceCollection collection = type switch 
- { - InitializationType.ApiKey => builder.Services.AddOpenAIAudioToText("model-id", "api-key"), - InitializationType.OpenAIClientInline => builder.Services.AddOpenAIAudioToText("model-id", client), - InitializationType.OpenAIClientInServiceProvider => builder.Services.AddOpenAIAudioToText("model-id"), - _ => builder.Services - }; - - // Assert - var service = builder.Build().GetRequiredService(); - - Assert.NotNull(service); - Assert.True(service is OpenAIAudioToTextService); - } - - #endregion - - public void Dispose() - { - this._httpClient.Dispose(); - } - - public enum InitializationType - { - ApiKey, - TokenCredential, - OpenAIClientInline, - OpenAIClientInServiceProvider, - OpenAIClientEndpoint, - ChatCompletionWithData - } - - private AzureOpenAIChatCompletionWithDataConfig GetCompletionWithDataConfig() - { - return new() - { - CompletionApiKey = "completion-api-key", - CompletionApiVersion = "completion-v1", - CompletionEndpoint = "https://completion-endpoint", - CompletionModelId = "completion-model-id", - DataSourceApiKey = "data-source-api-key", - DataSourceEndpoint = "https://data-source-endpoint", - DataSourceIndex = "data-source-index" - }; - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs deleted file mode 100644 index f6ee6bb93a11..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAITestHelper.cs +++ /dev/null @@ -1,20 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.IO; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI; - -/// -/// Helper for OpenAI test purposes. -/// -internal static class OpenAITestHelper -{ - /// - /// Reads test response from file for mocking purposes. - /// - /// Name of the file with test response. 
- internal static string GetTestResponse(string fileName) - { - return File.ReadAllText($"./OpenAI/TestData/{fileName}"); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt deleted file mode 100644 index 0e26da41d32b..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TestData/filters_streaming_multiple_function_calls_test_response.txt +++ /dev/null @@ -1,5 +0,0 @@ -data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"tool-call-id-1","type":"function","function":{"name":"MyPlugin-Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]} - -data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"tool-call-id-2","type":"function","function":{"name":"MyPlugin-Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]} - -data: [DONE] diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs deleted file mode 100644 index 640280830ba2..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/AzureOpenAITextEmbeddingGenerationServiceTests.cs +++ /dev/null @@ -1,188 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAITextEmbeddingGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextEmbeddingGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? 
- new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? - new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextEmbeddingGenerationService("deployment", client, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - - // Act - var result = await service.GenerateEmbeddingsAsync([]); - - // Assert - Assert.Empty(result); - } - - [Fact] - public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); - Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); - } - - [Fact] - 
public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - var result = await service.GenerateEmbeddingsAsync(["test"]); - - // Assert - Assert.Single(result); - - var memory = result[0]; - - Assert.Equal(0.018990106880664825, memory.Span[0]); - Assert.Equal(-0.0073809814639389515, memory.Span[1]); - } - - [Fact] - public async Task GenerateEmbeddingsWithDimensionsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextEmbeddingGenerationService( - "deployment-name", - "https://endpoint", - "api-key", - "model-id", - this._httpClient, - dimensions: 256); - - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - await service.GenerateEmbeddingsAsync(["test"]); - - var requestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - var optionsJson = JsonSerializer.Deserialize(requestContent); - - // Assert - Assert.Equal(256, optionsJson.GetProperty("dimensions").GetInt32()); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - #region private - - private HttpResponseMessage SuccessfulResponse - => new(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [ - { - "object": "embedding", - "embedding": [ - 0.018990106880664825, - -0.0073809814639389515 - ], - "index": 0 - } - ], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs deleted file mode 100644 index 
76638ae9cc9f..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextEmbedding/OpenAITextEmbeddingGenerationServiceTests.cs +++ /dev/null @@ -1,164 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextEmbedding; - -/// -/// Unit tests for class. -/// -public sealed class OpenAITextEmbeddingGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextEmbeddingGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new OpenAITextEmbeddingGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextEmbeddingGenerationService("model-id", client); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GenerateEmbeddingsForEmptyDataReturnsEmptyResultAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - - // Act - var result = await service.GenerateEmbeddingsAsync([]); - - // Assert - Assert.Empty(result); - } - - [Fact] - public async Task GenerateEmbeddingsWithEmptyResponseThrowsExceptionAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GenerateEmbeddingsAsync(["test"])); - Assert.Equal("Expected 1 text embedding(s), but received 0", exception.Message); - } - - [Fact] - public async Task GenerateEmbeddingsByDefaultWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - var result = await service.GenerateEmbeddingsAsync(["test"]); - - // Assert - Assert.Single(result); - - var memory = result[0]; - - Assert.Equal(0.018990106880664825, memory.Span[0]); - Assert.Equal(-0.0073809814639389515, memory.Span[1]); - } - - [Fact] - public async Task GenerateEmbeddingsWithDimensionsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextEmbeddingGenerationService("model-id", 
"api-key", "organization", this._httpClient, dimensions: 256); - this._messageHandlerStub.ResponseToReturn = this.SuccessfulResponse; - - // Act - await service.GenerateEmbeddingsAsync(["test"]); - - var requestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!); - var optionsJson = JsonSerializer.Deserialize(requestContent); - - // Assert - Assert.Equal(256, optionsJson.GetProperty("dimensions").GetInt32()); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - #region private - - private HttpResponseMessage SuccessfulResponse - => new(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "object": "list", - "data": [ - { - "object": "embedding", - "embedding": [ - 0.018990106880664825, - -0.0073809814639389515 - ], - "index": 0 - } - ], - "model": "model-id" - } - """, Encoding.UTF8, "application/json") - }; - - #endregion -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs deleted file mode 100644 index d20bb502e23d..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs +++ /dev/null @@ -1,210 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAITextGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new AzureOpenAITextGenerationService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextGenerationService("deployment", client, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GetTextContentsWithEmptyChoicesThrowsExceptionAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{\"id\":\"response-id\",\"object\":\"text_completion\",\"created\":1646932609,\"model\":\"ada\",\"choices\":[]}") - }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt")); - - Assert.Equal("Text completions not found", exception.Message); - } - - [Theory] - [InlineData(0)] - [InlineData(129)] - public async Task GetTextContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt) - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt }; - - // Act & Assert - var exception = await Assert.ThrowsAsync(() => service.GetTextContentsAsync("Prompt", settings)); - - Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task GetTextContentsHandlesSettingsCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - var settings = new OpenAIPromptExecutionSettings - { - MaxTokens = 123, - Temperature = 0.6, - TopP = 0.5, - FrequencyPenalty = 1.6, - PresencePenalty = 1.2, - 
ResultsPerPrompt = 5, - TokenSelectionBiases = new Dictionary { { 2, 3 } }, - StopSequences = ["stop_sequence"], - TopLogprobs = 5 - }; - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt", settings); - - // Assert - var requestContent = this._messageHandlerStub.RequestContent; - - Assert.NotNull(requestContent); - - var content = JsonSerializer.Deserialize(Encoding.UTF8.GetString(requestContent)); - - Assert.Equal("Prompt", content.GetProperty("prompt")[0].GetString()); - Assert.Equal(123, content.GetProperty("max_tokens").GetInt32()); - Assert.Equal(0.6, content.GetProperty("temperature").GetDouble()); - Assert.Equal(0.5, content.GetProperty("top_p").GetDouble()); - Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble()); - Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble()); - Assert.Equal(5, content.GetProperty("n").GetInt32()); - Assert.Equal(5, content.GetProperty("best_of").GetInt32()); - Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32()); - Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString()); - Assert.Equal(5, content.GetProperty("logprobs").GetInt32()); - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat response", result[0].Text); - - var usage = 
result[0].Metadata?["Usage"] as CompletionsUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new AzureOpenAITextGenerationService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs deleted file mode 100644 index b8d804c21b5d..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/OpenAITextGenerationServiceTests.cs +++ /dev/null @@ -1,113 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextGeneration; - -/// -/// Unit tests for class. 
-/// -public sealed class OpenAITextGenerationServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextGenerationServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new OpenAITextGenerationService("model-id", "api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextGenerationService("model-id", "api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var client = new OpenAIClient("key"); - var service = includeLoggerFactory ? 
- new OpenAITextGenerationService("model-id", client, loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextGenerationService("model-id", client); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Fact] - public async Task GetTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(OpenAITestHelper.GetTestResponse("text_completion_test_response.json")) - }; - - // Act - var result = await service.GetTextContentsAsync("Prompt"); - - // Assert - Assert.True(result.Count > 0); - Assert.Equal("Test chat response", result[0].Text); - - var usage = result[0].Metadata?["Usage"] as CompletionsUsage; - - Assert.NotNull(usage); - Assert.Equal(55, usage.PromptTokens); - Assert.Equal(100, usage.CompletionTokens); - Assert.Equal(155, usage.TotalTokens); - } - - [Fact] - public async Task GetStreamingTextContentsWorksCorrectlyAsync() - { - // Arrange - var service = new OpenAITextGenerationService("model-id", "api-key", "organization", this._httpClient); - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("text_completion_streaming_test_response.txt"))); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act & Assert - await foreach (var chunk in service.GetStreamingTextContentsAsync("Prompt")) - { - Assert.Equal("Test chat streaming response", chunk.Text); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs 
b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs deleted file mode 100644 index baa11a265f0a..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToAudio/AzureOpenAITextToAudioServiceTests.cs +++ /dev/null @@ -1,130 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToAudio; - -/// -/// Unit tests for class. -/// -public sealed class AzureOpenAITextToAudioServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextToAudioServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? - new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - Assert.Equal("deployment-name", service.Attributes["DeploymentName"]); - } - - [Theory] - [MemberData(nameof(ExecutionSettings))] - public async Task GetAudioContentWithInvalidSettingsThrowsExceptionAsync(OpenAITextToAudioExecutionSettings? 
settings, Type expectedExceptionType) - { - // Arrange - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new MemoryStream(new byte[] { 0x00, 0x00, 0xFF, 0x7F }); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var exception = await Record.ExceptionAsync(() => service.GetAudioContentsAsync("Some text", settings)); - - // Assert - Assert.NotNull(exception); - Assert.IsType(expectedExceptionType, exception); - } - - [Fact] - public async Task GetAudioContentByDefaultWorksCorrectlyAsync() - { - // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; - - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new MemoryStream(expectedByteArray); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); - - // Assert - var audioData = result[0].Data!.Value; - Assert.False(audioData.IsEmpty); - Assert.True(audioData.Span.SequenceEqual(expectedByteArray)); - } - - [Theory] - [InlineData(true, "http://local-endpoint")] - [InlineData(false, "https://endpoint")] - public async Task GetAudioContentUsesValidBaseUrlAsync(bool useHttpClientBaseAddress, string expectedBaseAddress) - { - // Arrange - var expectedByteArray = new byte[] { 0x00, 0x00, 0xFF, 0x7F }; - - if (useHttpClientBaseAddress) - { - this._httpClient.BaseAddress = new Uri("http://local-endpoint"); - } - - var service = new AzureOpenAITextToAudioService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient); - await using var stream = new 
MemoryStream(expectedByteArray); - - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StreamContent(stream) - }; - - // Act - var result = await service.GetAudioContentsAsync("Some text", new OpenAITextToAudioExecutionSettings("voice")); - - // Assert - Assert.StartsWith(expectedBaseAddress, this._messageHandlerStub.RequestUri!.AbsoluteUri, StringComparison.InvariantCulture); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } - - public static TheoryData ExecutionSettings => new() - { - { new OpenAITextToAudioExecutionSettings(""), typeof(ArgumentException) }, - }; -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs deleted file mode 100644 index 084fa923b2ce..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/AzureOpenAITextToImageTests.cs +++ /dev/null @@ -1,174 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Azure.Core; -using Azure.Core.Pipeline; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Services; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; - -/// -/// Unit tests for class. 
-/// -public sealed class AzureOpenAITextToImageServiceTests : IDisposable -{ - private readonly MultipleHttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public AzureOpenAITextToImageServiceTests() - { - this._messageHandlerStub = new MultipleHttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - - var mockLogger = new Mock(); - - mockLogger.Setup(l => l.IsEnabled(It.IsAny())).Returns(true); - - this._mockLoggerFactory.Setup(l => l.CreateLogger(It.IsAny())).Returns(mockLogger.Object); - } - - [Fact] - public async Task ItSupportsOpenAIClientInjectionAsync() - { - // Arrange - using var messageHandlerStub = new HttpMessageHandlerStub(); - using var httpClient = new HttpClient(messageHandlerStub, false); - messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "revised_prompt": "A photo capturing the diversity of the Earth's landscapes.", - "url": "https://dalleprodsec.blob.core.windows.net/private/images/0f20c621-7eb0-449d-87fd-8dd2a3a15fbe/generated_00.png?se=2023-12-15T17%3A36%3A25Z&sig=jd2%2Fa8jOM9NmclrUbOLdRgAxcFDFPezOpG%2BSF82d7zM%3D&ske=2023-12-20T10%3A10%3A28Z&skoid=e52d5ed7-0657-4f62-bc12-7e5dbb260a96&sks=b&skt=2023-12-13T10%3A10%3A28Z&sktid=33e01921-4d64-4f8c-a055-5bdaffd5e33d&skv=2020-10-02&sp=r&spr=https&sr=b&sv=2020-10-02" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - var clientOptions = new OpenAIClientOptions - { - Transport = new HttpClientTransport(httpClient), - }; - var openAIClient = new OpenAIClient(new Uri("https://az.com"), new Azure.AzureKeyCredential("NOKEY"), clientOptions); - - var textToImageCompletion = new AzureOpenAITextToImageService(deploymentName: "gpt-35-turbo", openAIClient, modelId: "gpt-3.5-turbo"); - - // Act - var result = 
await textToImageCompletion.GenerateImageAsync("anything", 1024, 1024); - - // Assert - Assert.NotNull(result); - } - - [Theory] - [InlineData(1024, 1024, null)] - [InlineData(1792, 1024, null)] - [InlineData(1024, 1792, null)] - [InlineData(512, 512, typeof(NotSupportedException))] - [InlineData(256, 256, typeof(NotSupportedException))] - [InlineData(123, 456, typeof(NotSupportedException))] - public async Task ItValidatesTheModelIdAsync(int width, int height, Type? expectedExceptionType) - { - // Arrange - using var messageHandlerStub = new HttpMessageHandlerStub(); - using var httpClient = new HttpClient(messageHandlerStub, false); - messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "revised_prompt": "A photo capturing the diversity of the Earth's landscapes.", - "url": "https://dalleprodsec.blob.core.windows.net/private/images/0f20c621-7eb0-449d-87fd-8dd2a3a15fbe/generated_00.png?se=2023-12-15T17%3A36%3A25Z&sig=jd2%2Fa8jOM9NmclrUbOLdRgAxcFDFPezOpG%2BSF82d7zM%3D&ske=2023-12-20T10%3A10%3A28Z&skoid=e52d5ed7-0657-4f62-bc12-7e5dbb260a96&sks=b&skt=2023-12-13T10%3A10%3A28Z&sktid=33e01921-4d64-4f8c-a055-5bdaffd5e33d&skv=2020-10-02&sp=r&spr=https&sr=b&sv=2020-10-02" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - - var textToImageCompletion = new AzureOpenAITextToImageService(deploymentName: "gpt-35-turbo", modelId: "gpt-3.5-turbo", endpoint: "https://az.com", apiKey: "NOKEY", httpClient: httpClient); - - if (expectedExceptionType is not null) - { - await Assert.ThrowsAsync(expectedExceptionType, () => textToImageCompletion.GenerateImageAsync("anything", width, height)); - } - else - { - // Act - var result = await textToImageCompletion.GenerateImageAsync("anything", width, height); - - // Assert - Assert.NotNull(result); - } - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void 
ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken()); - var service = includeLoggerFactory ? - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id", loggerFactory: this._mockLoggerFactory.Object) : - new AzureOpenAITextToImageService("deployment", "https://endpoint", credentials, "model-id"); - - // Assert - Assert.NotNull(service); - Assert.Equal("model-id", service.Attributes["ModelId"]); - } - - [Theory] - [InlineData("gpt-35-turbo", "gpt-3.5-turbo")] - [InlineData("gpt-35-turbo", null)] - [InlineData("gpt-4-turbo", "gpt-4")] - public void ItHasPropertiesAsDefined(string deploymentName, string? 
modelId) - { - var service = new AzureOpenAITextToImageService(deploymentName, "https://az.com", "NOKEY", modelId); - Assert.Contains(AzureOpenAITextToImageService.DeploymentNameKey, service.Attributes); - Assert.Equal(deploymentName, service.Attributes[AzureOpenAITextToImageService.DeploymentNameKey]); - - if (modelId is null) - { - return; - } - - Assert.Contains(AIServiceExtensions.ModelIdKey, service.Attributes); - Assert.Equal(modelId, service.Attributes[AIServiceExtensions.ModelIdKey]); - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs deleted file mode 100644 index 1f31ec076edd..000000000000 --- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs +++ /dev/null @@ -1,89 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Text; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Moq; -using Xunit; - -namespace SemanticKernel.Connectors.UnitTests.OpenAI.TextToImage; - -/// -/// Unit tests for class. -/// -public sealed class OpenAITextToImageServiceTests : IDisposable -{ - private readonly HttpMessageHandlerStub _messageHandlerStub; - private readonly HttpClient _httpClient; - private readonly Mock _mockLoggerFactory; - - public OpenAITextToImageServiceTests() - { - this._messageHandlerStub = new HttpMessageHandlerStub(); - this._httpClient = new HttpClient(this._messageHandlerStub, false); - this._mockLoggerFactory = new Mock(); - } - - [Theory] - [InlineData(true)] - [InlineData(false)] - public void ConstructorWorksCorrectly(bool includeLoggerFactory) - { - // Arrange & Act - var service = includeLoggerFactory ? 
- new OpenAITextToImageService("api-key", "organization", loggerFactory: this._mockLoggerFactory.Object) : - new OpenAITextToImageService("api-key", "organization"); - - // Assert - Assert.NotNull(service); - Assert.Equal("organization", service.Attributes["Organization"]); - Assert.False(service.Attributes.ContainsKey("ModelId")); - } - - [Theory] - [InlineData(123, 456, true)] - [InlineData(256, 512, true)] - [InlineData(256, 256, false)] - [InlineData(512, 512, false)] - [InlineData(1024, 1024, false)] - public async Task GenerateImageWorksCorrectlyAsync(int width, int height, bool expectedException) - { - // Arrange - var service = new OpenAITextToImageService("api-key", "organization", "dall-e-3", this._httpClient); - Assert.Equal("dall-e-3", service.Attributes["ModelId"]); - this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK) - { - Content = new StringContent(""" - { - "created": 1702575371, - "data": [ - { - "url": "https://image-url" - } - ] - } - """, Encoding.UTF8, "application/json") - }; - - // Act & Assert - if (expectedException) - { - await Assert.ThrowsAsync(() => service.GenerateImageAsync("description", width, height)); - } - else - { - var result = await service.GenerateImageAsync("description", width, height); - - Assert.Equal("https://image-url", result); - } - } - - public void Dispose() - { - this._httpClient.Dispose(); - this._messageHandlerStub.Dispose(); - } -} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/.editorconfig new file mode 100644 index 000000000000..394eef685f21 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/.editorconfig @@ -0,0 +1,6 @@ +# Suppressing errors for Test projects under dotnet folder +[*.cs] +dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task +dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set 
to none to prevent IDE from changing on autosave +dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member +dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj new file mode 100644 index 000000000000..ca442f3b3233 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/Connectors.Weaviate.UnitTests.csproj @@ -0,0 +1,37 @@ +๏ปฟ + + + SemanticKernel.Connectors.Weaviate.UnitTests + SemanticKernel.Connectors.Weaviate.UnitTests + net8.0 + true + enable + disable + false + $(NoWarn);SKEXP0001,SKEXP0020,VSTHRD111,CA2007 + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs new file mode 100644 index 000000000000..5e13d1d2f4c4 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateGenericDataModelMapperTests.cs @@ -0,0 +1,445 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class WeaviateGenericDataModelMapperTests +{ + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = + { + new WeaviateDateTimeOffsetConverter(), + new WeaviateNullableDateTimeOffsetConverter() + } + }; + + private static readonly VectorStoreRecordKeyProperty s_keyProperty = new("Key", typeof(Guid)); + + private static readonly List s_dataProperties = new() + { + new VectorStoreRecordDataProperty("StringDataProp", typeof(string)), + new VectorStoreRecordDataProperty("BoolDataProp", typeof(bool)), + new VectorStoreRecordDataProperty("NullableBoolDataProp", typeof(bool?)), + new VectorStoreRecordDataProperty("IntDataProp", typeof(int)), + new VectorStoreRecordDataProperty("NullableIntDataProp", typeof(int?)), + new VectorStoreRecordDataProperty("LongDataProp", typeof(long)), + new VectorStoreRecordDataProperty("NullableLongDataProp", typeof(long?)), + new VectorStoreRecordDataProperty("ShortDataProp", typeof(short)), + new VectorStoreRecordDataProperty("NullableShortDataProp", typeof(short?)), + new VectorStoreRecordDataProperty("ByteDataProp", typeof(byte)), + new VectorStoreRecordDataProperty("NullableByteDataProp", typeof(byte?)), + new VectorStoreRecordDataProperty("FloatDataProp", typeof(float)), + new VectorStoreRecordDataProperty("NullableFloatDataProp", typeof(float?)), + new VectorStoreRecordDataProperty("DoubleDataProp", typeof(double)), + new VectorStoreRecordDataProperty("NullableDoubleDataProp", typeof(double?)), + new VectorStoreRecordDataProperty("DecimalDataProp", typeof(decimal)), + new VectorStoreRecordDataProperty("NullableDecimalDataProp", typeof(decimal?)), + new VectorStoreRecordDataProperty("DateTimeDataProp", typeof(DateTime)), + new VectorStoreRecordDataProperty("NullableDateTimeDataProp", typeof(DateTime?)), + new 
VectorStoreRecordDataProperty("DateTimeOffsetDataProp", typeof(DateTimeOffset)), + new VectorStoreRecordDataProperty("NullableDateTimeOffsetDataProp", typeof(DateTimeOffset?)), + new VectorStoreRecordDataProperty("GuidDataProp", typeof(Guid)), + new VectorStoreRecordDataProperty("NullableGuidDataProp", typeof(Guid?)), + new VectorStoreRecordDataProperty("TagListDataProp", typeof(List)), + }; + + private static readonly List s_vectorProperties = new() + { + new VectorStoreRecordVectorProperty("FloatVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableFloatVector", typeof(ReadOnlyMemory?)), + new VectorStoreRecordVectorProperty("DoubleVector", typeof(ReadOnlyMemory)), + new VectorStoreRecordVectorProperty("NullableDoubleVector", typeof(ReadOnlyMemory?)), + }; + + private static readonly Dictionary s_storagePropertyNames = s_dataProperties + .Select(l => l.DataModelPropertyName) + .Concat(s_vectorProperties.Select(l => l.DataModelPropertyName)) + .Concat([s_keyProperty.DataModelPropertyName]) + .ToDictionary(k => k, v => v); + + private static readonly float[] s_floatVector = [1.0f, 2.0f, 3.0f]; + private static readonly double[] s_doubleVector = [1.0f, 2.0f, 3.0f]; + private static readonly List s_taglist = ["tag1", "tag2"]; + + [Fact] + public void MapFromDataToStorageModelMapsAllSupportedTypes() + { + // Arrange + var key = new Guid("55555555-5555-5555-5555-555555555555"); + var sut = new WeaviateGenericDataModelMapper( + "Collection", + s_keyProperty, + s_dataProperties, + s_vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var dataModel = new VectorStoreGenericDataModel(key) + { + Data = + { + ["StringDataProp"] = "string", + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["ShortDataProp"] = (short)5, + ["NullableShortDataProp"] = (short)6, + ["ByteDataProp"] = (byte)7, + 
["NullableByteDataProp"] = (byte)8, + ["FloatDataProp"] = 9.0f, + ["NullableFloatDataProp"] = 10.0f, + ["DoubleDataProp"] = 11.0, + ["NullableDoubleDataProp"] = 12.0, + ["DecimalDataProp"] = 13.99m, + ["NullableDecimalDataProp"] = 14.00m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1), + ["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1), + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["GuidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), + ["NullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), + ["TagListDataProp"] = s_taglist + }, + Vectors = + { + ["FloatVector"] = new ReadOnlyMemory(s_floatVector), + ["NullableFloatVector"] = new ReadOnlyMemory(s_floatVector), + ["DoubleVector"] = new ReadOnlyMemory(s_doubleVector), + ["NullableDoubleVector"] = new ReadOnlyMemory(s_doubleVector), + } + }; + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(key, (Guid?)storageModel["id"]); + Assert.Equal("Collection", (string?)storageModel["class"]); + Assert.Equal("string", (string?)storageModel["properties"]?["StringDataProp"]); + Assert.Equal(true, (bool?)storageModel["properties"]?["BoolDataProp"]); + Assert.Equal(false, (bool?)storageModel["properties"]?["NullableBoolDataProp"]); + Assert.Equal(1, (int?)storageModel["properties"]?["IntDataProp"]); + Assert.Equal(2, (int?)storageModel["properties"]?["NullableIntDataProp"]); + Assert.Equal(3L, (long?)storageModel["properties"]?["LongDataProp"]); + Assert.Equal(4L, (long?)storageModel["properties"]?["NullableLongDataProp"]); + Assert.Equal((short)5, (short?)storageModel["properties"]?["ShortDataProp"]); + Assert.Equal((short)6, (short?)storageModel["properties"]?["NullableShortDataProp"]); + Assert.Equal((byte)7, (byte?)storageModel["properties"]?["ByteDataProp"]); + Assert.Equal((byte)8, 
(byte?)storageModel["properties"]?["NullableByteDataProp"]); + Assert.Equal(9.0f, (float?)storageModel["properties"]?["FloatDataProp"]); + Assert.Equal(10.0f, (float?)storageModel["properties"]?["NullableFloatDataProp"]); + Assert.Equal(11.0, (double?)storageModel["properties"]?["DoubleDataProp"]); + Assert.Equal(12.0, (double?)storageModel["properties"]?["NullableDoubleDataProp"]); + Assert.Equal(13.99m, (decimal?)storageModel["properties"]?["DecimalDataProp"]); + Assert.Equal(14.00m, (decimal?)storageModel["properties"]?["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["DateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), (DateTime?)storageModel["properties"]?["NullableDateTimeDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), (DateTimeOffset?)storageModel["properties"]?["NullableDateTimeOffsetDataProp"]); + Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), (Guid?)storageModel["properties"]?["GuidDataProp"]); + Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), (Guid?)storageModel["properties"]?["NullableGuidDataProp"]); + Assert.Equal(s_taglist, storageModel["properties"]?["TagListDataProp"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_floatVector, storageModel["vectors"]?["FloatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_floatVector, storageModel["vectors"]?["NullableFloatVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_doubleVector, storageModel["vectors"]?["DoubleVector"]!.AsArray().GetValues().ToArray()); + Assert.Equal(s_doubleVector, storageModel["vectors"]?["NullableDoubleVector"]!.AsArray().GetValues().ToArray()); + } + + [Fact] + public void MapFromDataToStorageModelMapsNullValues() + { + // Arrange + var key = new 
Guid("55555555-5555-5555-5555-555555555555"); + var keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); + + var dataProperties = new List + { + new("StringDataProp", typeof(string)), + new("NullableIntDataProp", typeof(int?)), + }; + + var vectorProperties = new List + { + new("NullableFloatVector", typeof(ReadOnlyMemory?)) + }; + + var dataModel = new VectorStoreGenericDataModel(key) + { + Data = + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + }, + Vectors = + { + ["NullableFloatVector"] = null, + }, + }; + + var sut = new WeaviateGenericDataModelMapper( + "Collection", + keyProperty, + dataProperties, + vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Null(storageModel["StringDataProp"]); + Assert.Null(storageModel["NullableIntDataProp"]); + Assert.Null(storageModel["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelMapsAllSupportedTypes() + { + // Arrange + var key = new Guid("55555555-5555-5555-5555-555555555555"); + var sut = new WeaviateGenericDataModelMapper( + "Collection", + s_keyProperty, + s_dataProperties, + s_vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var storageModel = new JsonObject + { + ["id"] = key, + ["properties"] = new JsonObject + { + ["StringDataProp"] = "string", + ["BoolDataProp"] = true, + ["NullableBoolDataProp"] = false, + ["IntDataProp"] = 1, + ["NullableIntDataProp"] = 2, + ["LongDataProp"] = 3L, + ["NullableLongDataProp"] = 4L, + ["ShortDataProp"] = (short)5, + ["NullableShortDataProp"] = (short)6, + ["ByteDataProp"] = (byte)7, + ["NullableByteDataProp"] = (byte)8, + ["FloatDataProp"] = 9.0f, + ["NullableFloatDataProp"] = 10.0f, + ["DoubleDataProp"] = 11.0, + ["NullableDoubleDataProp"] = 12.0, + ["DecimalDataProp"] = 13.99m, + ["NullableDecimalDataProp"] = 14.00m, + ["DateTimeDataProp"] = new DateTime(2021, 1, 1), + 
["NullableDateTimeDataProp"] = new DateTime(2021, 1, 1), + ["DateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["NullableDateTimeOffsetDataProp"] = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), + ["GuidDataProp"] = new Guid("11111111-1111-1111-1111-111111111111"), + ["NullableGuidDataProp"] = new Guid("22222222-2222-2222-2222-222222222222"), + ["TagListDataProp"] = new JsonArray(s_taglist.Select(l => (JsonValue)l).ToArray()) + }, + ["vectors"] = new JsonObject + { + ["FloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["NullableFloatVector"] = new JsonArray(s_floatVector.Select(l => (JsonValue)l).ToArray()), + ["DoubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), + ["NullableDoubleVector"] = new JsonArray(s_doubleVector.Select(l => (JsonValue)l).ToArray()), + } + }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal(key, dataModel.Key); + Assert.Equal("string", dataModel.Data["StringDataProp"]); + Assert.Equal(true, dataModel.Data["BoolDataProp"]); + Assert.Equal(false, dataModel.Data["NullableBoolDataProp"]); + Assert.Equal(1, dataModel.Data["IntDataProp"]); + Assert.Equal(2, dataModel.Data["NullableIntDataProp"]); + Assert.Equal(3L, dataModel.Data["LongDataProp"]); + Assert.Equal(4L, dataModel.Data["NullableLongDataProp"]); + Assert.Equal((short)5, dataModel.Data["ShortDataProp"]); + Assert.Equal((short)6, dataModel.Data["NullableShortDataProp"]); + Assert.Equal((byte)7, dataModel.Data["ByteDataProp"]); + Assert.Equal((byte)8, dataModel.Data["NullableByteDataProp"]); + Assert.Equal(9.0f, dataModel.Data["FloatDataProp"]); + Assert.Equal(10.0f, dataModel.Data["NullableFloatDataProp"]); + Assert.Equal(11.0, dataModel.Data["DoubleDataProp"]); + Assert.Equal(12.0, dataModel.Data["NullableDoubleDataProp"]); + Assert.Equal(13.99m, 
dataModel.Data["DecimalDataProp"]); + Assert.Equal(14.00m, dataModel.Data["NullableDecimalDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel.Data["DateTimeDataProp"]); + Assert.Equal(new DateTime(2021, 1, 1, 0, 0, 0), dataModel.Data["NullableDateTimeDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["DateTimeOffsetDataProp"]); + Assert.Equal(new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero), dataModel.Data["NullableDateTimeOffsetDataProp"]); + Assert.Equal(new Guid("11111111-1111-1111-1111-111111111111"), dataModel.Data["GuidDataProp"]); + Assert.Equal(new Guid("22222222-2222-2222-2222-222222222222"), dataModel.Data["NullableGuidDataProp"]); + Assert.Equal(s_taglist, dataModel.Data["TagListDataProp"]); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["FloatVector"]!).ToArray()); + Assert.Equal(s_floatVector, ((ReadOnlyMemory)dataModel.Vectors["NullableFloatVector"]!)!.ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["DoubleVector"]!).ToArray()); + Assert.Equal(s_doubleVector, ((ReadOnlyMemory)dataModel.Vectors["NullableDoubleVector"]!)!.ToArray()); + } + + [Fact] + public void MapFromStorageToDataModelMapsNullValues() + { + // Arrange + var key = new Guid("55555555-5555-5555-5555-555555555555"); + var keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); + + var dataProperties = new List + { + new("StringDataProp", typeof(string)), + new("NullableIntDataProp", typeof(int?)), + }; + + var vectorProperties = new List + { + new("NullableFloatVector", typeof(ReadOnlyMemory?)) + }; + + var storageModel = new JsonObject + { + ["id"] = key, + ["properties"] = new JsonObject + { + ["StringDataProp"] = null, + ["NullableIntDataProp"] = null, + }, + ["vectors"] = new JsonObject + { + ["NullableFloatVector"] = null + } + }; + + var sut = new WeaviateGenericDataModelMapper( + "Collection", + s_keyProperty, + s_dataProperties, + 
s_vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal(key, dataModel.Key); + Assert.Null(dataModel.Data["StringDataProp"]); + Assert.Null(dataModel.Data["NullableIntDataProp"]); + Assert.Null(dataModel.Vectors["NullableFloatVector"]); + } + + [Fact] + public void MapFromStorageToDataModelThrowsForMissingKey() + { + // Arrange + var sut = new WeaviateGenericDataModelMapper( + "Collection", + s_keyProperty, + s_dataProperties, + s_vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var storageModel = new JsonObject(); + + // Act & Assert + var exception = Assert.Throws( + () => sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true })); + } + + [Fact] + public void MapFromDataToStorageModelSkipsMissingProperties() + { + // Arrange + var key = new Guid("55555555-5555-5555-5555-555555555555"); + var keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); + + var dataProperties = new List + { + new("StringDataProp", typeof(string)), + new("NullableIntDataProp", typeof(int?)), + }; + + var vectorProperties = new List + { + new("FloatVector", typeof(ReadOnlyMemory)) + }; + + var dataModel = new VectorStoreGenericDataModel(key); + var sut = new WeaviateGenericDataModelMapper( + "Collection", + keyProperty, + dataProperties, + vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + // Act + var storageModel = sut.MapFromDataToStorageModel(dataModel); + + // Assert + Assert.Equal(key, (Guid?)storageModel["id"]); + Assert.False(storageModel.ContainsKey("StringDataProp")); + Assert.False(storageModel.ContainsKey("FloatVector")); + } + + [Fact] + public void MapFromStorageToDataModelSkipsMissingProperties() + { + // Arrange + var key = new Guid("55555555-5555-5555-5555-555555555555"); + var 
keyProperty = new VectorStoreRecordKeyProperty("Key", typeof(Guid)); + + var dataProperties = new List + { + new("StringDataProp", typeof(string)), + new("NullableIntDataProp", typeof(int?)), + }; + + var vectorProperties = new List + { + new("FloatVector", typeof(ReadOnlyMemory)) + }; + + var sut = new WeaviateGenericDataModelMapper( + "Collection", + keyProperty, + dataProperties, + vectorProperties, + s_storagePropertyNames, + s_jsonSerializerOptions); + + var storageModel = new JsonObject + { + ["id"] = key + }; + + // Act + var dataModel = sut.MapFromStorageToDataModel(storageModel, new StorageToDataModelMapperOptions { IncludeVectors = true }); + + // Assert + Assert.Equal(key, dataModel.Key); + Assert.False(dataModel.Data.ContainsKey("StringDataProp")); + Assert.False(dataModel.Vectors.ContainsKey("FloatVector")); + } +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs new file mode 100644 index 000000000000..6b38dbc507e9 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateHotel.cs @@ -0,0 +1,49 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Data; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +#pragma warning disable CS8618 + +public sealed record WeaviateHotel +{ + /// The key of the record. + [VectorStoreRecordKey] + public Guid HotelId { get; init; } + + /// A string metadata field. + [VectorStoreRecordData(IsFilterable = true)] + public string? HotelName { get; set; } + + /// An int metadata field. + [VectorStoreRecordData] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. 
+ [JsonPropertyName("parking_is_included")] + [VectorStoreRecordData] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. + [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData(IsFullTextSearchable = true)] + public string Description { get; set; } + + [VectorStoreRecordData] + public DateTimeOffset Timestamp { get; set; } + + /// A vector field. + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.Hnsw, DistanceFunction: DistanceFunction.CosineDistance)] + public ReadOnlyMemory? DescriptionEmbedding { get; set; } +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs new file mode 100644 index 000000000000..60c6525b797b --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateKernelBuilderExtensionsTests.cs @@ -0,0 +1,36 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. 
+/// +public sealed class WeaviateKernelBuilderExtensionsTests +{ + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(Mock.Of()); + + // Act + this._kernelBuilder.AddWeaviateVectorStore(); + + var kernel = this._kernelBuilder.Build(); + var vectorStore = kernel.Services.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } +} diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs similarity index 90% rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs rename to dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs index 58fb5c23ee08..d3c4a2a0c92f 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryBuilderExtensionsTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryBuilderExtensionsTests.cs @@ -18,6 +18,11 @@ namespace SemanticKernel.Connectors.UnitTests.Weaviate; public sealed class WeaviateMemoryBuilderExtensionsTests : IDisposable { + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + private readonly HttpMessageHandlerStub _messageHandlerStub; private readonly HttpClient _httpClient; @@ -46,7 +51,7 @@ public async Task WeaviateMemoryStoreShouldBeProperlyInitializedAsync(string? 
ap } }; - this._messageHandlerStub.ResponseToReturn.Content = new StringContent(JsonSerializer.Serialize(getResponse, new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }), Encoding.UTF8, MediaTypeNames.Application.Json); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent(JsonSerializer.Serialize(getResponse, s_jsonSerializerOptions), Encoding.UTF8, MediaTypeNames.Application.Json); var builder = new MemoryBuilder(); builder.WithWeaviateMemoryStore(this._httpClient, "https://fake-random-test-weaviate-host", "fake-api-key", apiVersion); diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs similarity index 92% rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs rename to dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs index a19a7df73192..97134f46818a 100644 --- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateMemoryStoreTests.cs @@ -18,6 +18,11 @@ namespace SemanticKernel.Connectors.UnitTests.Weaviate; /// public sealed class WeaviateMemoryStoreTests : IDisposable { + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + private readonly HttpMessageHandlerStub _messageHandlerStub; private readonly HttpClient _httpClient; @@ -35,7 +40,7 @@ public WeaviateMemoryStoreTests() } }; - this._messageHandlerStub.ResponseToReturn.Content = new StringContent(JsonSerializer.Serialize(getResponse, new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }), Encoding.UTF8, MediaTypeNames.Application.Json); + this._messageHandlerStub.ResponseToReturn.Content = new StringContent(JsonSerializer.Serialize(getResponse, 
s_jsonSerializerOptions), Encoding.UTF8, MediaTypeNames.Application.Json); this._httpClient = new HttpClient(this._messageHandlerStub, false); } diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs new file mode 100644 index 000000000000..74ed9f185485 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateServiceCollectionExtensionsTests.cs @@ -0,0 +1,36 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Net.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class WeaviateServiceCollectionExtensionsTests +{ + private readonly IServiceCollection _serviceCollection = new ServiceCollection(); + + [Fact] + public void AddVectorStoreRegistersClass() + { + // Arrange + this._serviceCollection.AddSingleton(Mock.Of()); + + // Act + this._serviceCollection.AddWeaviateVectorStore(); + + var serviceProvider = this._serviceCollection.BuildServiceProvider(); + var vectorStore = serviceProvider.GetRequiredService(); + + // Assert + Assert.NotNull(vectorStore); + Assert.IsType(vectorStore); + } +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs new file mode 100644 index 000000000000..e81bb12b97fa --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreCollectionCreateMappingTests.cs @@ -0,0 +1,184 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class WeaviateVectorStoreCollectionCreateMappingTests +{ + [Fact] + public void ItThrowsExceptionWithInvalidIndexKind() + { + // Arrange + var vectorProperties = new List + { + new("PropertyName", typeof(ReadOnlyMemory)) { IndexKind = "non-existent-index-kind" } + }; + + var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + + // Act & Assert + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + collectionName: "CollectionName", + dataProperties: [], + vectorProperties: vectorProperties, + storagePropertyNames: storagePropertyNames)); + } + + [Theory] + [InlineData(IndexKind.Hnsw, "hnsw")] + [InlineData(IndexKind.Flat, "flat")] + [InlineData(IndexKind.Dynamic, "dynamic")] + public void ItReturnsCorrectSchemaWithValidIndexKind(string indexKind, string expectedIndexKind) + { + // Arrange + var vectorProperties = new List + { + new("PropertyName", typeof(ReadOnlyMemory)) { IndexKind = indexKind } + }; + + var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + + // Act + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + collectionName: "CollectionName", + dataProperties: [], + vectorProperties: vectorProperties, + storagePropertyNames: storagePropertyNames); + + var actualIndexKind = schema.VectorConfigurations["propertyName"].VectorIndexType; + + // Assert + Assert.Equal(expectedIndexKind, actualIndexKind); + } + + [Fact] + public void ItThrowsExceptionWithInvalidDistanceFunction() + { + // Arrange + var vectorProperties = new List + { + new("PropertyName", typeof(ReadOnlyMemory)) { DistanceFunction = "non-existent-distance-function" } + }; + + var storagePropertyNames = new Dictionary { 
["PropertyName"] = "propertyName" }; + + // Act & Assert + Assert.Throws(() => WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + collectionName: "CollectionName", + dataProperties: [], + vectorProperties: vectorProperties, + storagePropertyNames: storagePropertyNames)); + } + + [Theory] + [InlineData(DistanceFunction.CosineDistance, "cosine")] + [InlineData(DistanceFunction.DotProductSimilarity, "dot")] + [InlineData(DistanceFunction.EuclideanSquaredDistance, "l2-squared")] + [InlineData(DistanceFunction.Hamming, "hamming")] + [InlineData(DistanceFunction.ManhattanDistance, "manhattan")] + public void ItReturnsCorrectSchemaWithValidDistanceFunction(string distanceFunction, string expectedDistanceFunction) + { + // Arrange + var vectorProperties = new List + { + new("PropertyName", typeof(ReadOnlyMemory)) { DistanceFunction = distanceFunction } + }; + + var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + + // Act + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + collectionName: "CollectionName", + dataProperties: [], + vectorProperties: vectorProperties, + storagePropertyNames: storagePropertyNames); + + var actualDistanceFunction = schema.VectorConfigurations["propertyName"].VectorIndexConfig?.Distance; + + // Assert + Assert.Equal(expectedDistanceFunction, actualDistanceFunction); + } + + [Theory] + [InlineData(typeof(string), "text")] + [InlineData(typeof(List), "text[]")] + [InlineData(typeof(int), "int")] + [InlineData(typeof(int?), "int")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(long), "int")] + [InlineData(typeof(long?), "int")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(short), "int")] + [InlineData(typeof(short?), "int")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(byte), "int")] + [InlineData(typeof(byte?), "int")] + 
[InlineData(typeof(List), "int[]")] + [InlineData(typeof(List), "int[]")] + [InlineData(typeof(float), "number")] + [InlineData(typeof(float?), "number")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(double), "number")] + [InlineData(typeof(double?), "number")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(decimal), "number")] + [InlineData(typeof(decimal?), "number")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(List), "number[]")] + [InlineData(typeof(DateTime), "date")] + [InlineData(typeof(DateTime?), "date")] + [InlineData(typeof(List), "date[]")] + [InlineData(typeof(List), "date[]")] + [InlineData(typeof(DateTimeOffset), "date")] + [InlineData(typeof(DateTimeOffset?), "date")] + [InlineData(typeof(List), "date[]")] + [InlineData(typeof(List), "date[]")] + [InlineData(typeof(Guid), "uuid")] + [InlineData(typeof(Guid?), "uuid")] + [InlineData(typeof(List), "uuid[]")] + [InlineData(typeof(List), "uuid[]")] + [InlineData(typeof(bool), "boolean")] + [InlineData(typeof(bool?), "boolean")] + [InlineData(typeof(List), "boolean[]")] + [InlineData(typeof(List), "boolean[]")] + [InlineData(typeof(object), "object")] + [InlineData(typeof(List), "object[]")] + public void ItMapsPropertyCorrectly(Type propertyType, string expectedPropertyType) + { + // Arrange + var dataProperties = new List + { + new("PropertyName", propertyType) { IsFilterable = true, IsFullTextSearchable = true } + }; + + var storagePropertyNames = new Dictionary { ["PropertyName"] = "propertyName" }; + + // Act + var schema = WeaviateVectorStoreCollectionCreateMapping.MapToSchema( + collectionName: "CollectionName", + dataProperties: dataProperties, + vectorProperties: [], + storagePropertyNames: storagePropertyNames); + + var property = schema.Properties[0]; + + // Assert + Assert.Equal("propertyName", property.Name); + Assert.Equal(expectedPropertyType, 
property.DataType[0]); + Assert.True(property.IndexSearchable); + Assert.True(property.IndexFilterable); + } +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..8f7450aa0913 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordCollectionTests.cs @@ -0,0 +1,470 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class WeaviateVectorStoreRecordCollectionTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub = new(); + private readonly HttpClient _mockHttpClient; + + public WeaviateVectorStoreRecordCollectionTests() + { + this._mockHttpClient = new(this._messageHandlerStub, false) { BaseAddress = new Uri("http://default-endpoint") }; + } + + [Fact] + public void ConstructorForModelWithoutKeyThrowsException() + { + // Act & Assert + var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "collection")); + Assert.Contains("No key property found", exception.Message); + } + + [Fact] + public void ConstructorWithoutEndpointThrowsException() + { + // Arrange + using var httpClient = new HttpClient(); + + // Act & Assert + var exception = Assert.Throws(() => new WeaviateVectorStoreRecordCollection(httpClient, "collection")); + Assert.Contains("Weaviate endpoint should be provided", exception.Message); + } + + [Fact] + public void 
ConstructorWithDeclarativeModelInitializesCollection() + { + // Act & Assert + var collection = new WeaviateVectorStoreRecordCollection( + this._mockHttpClient, + "collection"); + + Assert.NotNull(collection); + } + + [Fact] + public void ConstructorWithImperativeModelInitializesCollection() + { + // Arrange + var definition = new VectorStoreRecordDefinition + { + Properties = [new VectorStoreRecordKeyProperty("Id", typeof(Guid))] + }; + + // Act + var collection = new WeaviateVectorStoreRecordCollection( + this._mockHttpClient, + "collection", + new() { VectorStoreRecordDefinition = definition }); + + // Assert + Assert.NotNull(collection); + } + + [Theory] + [MemberData(nameof(CollectionExistsData))] + public async Task CollectionExistsReturnsValidResultAsync(HttpResponseMessage responseMessage, bool expectedResult) + { + // Arrange + this._messageHandlerStub.ResponseToReturn = responseMessage; + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + + // Act + var actualResult = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(expectedResult, actualResult); + } + + [Fact] + public async Task CreateCollectionUsesValidCollectionSchemaAsync() + { + // Arrange + const string CollectionName = "Collection"; + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + + // Act + await sut.CreateCollectionAsync(); + + // Assert + var schemaRequest = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + + Assert.NotNull(schemaRequest); + + Assert.Equal(CollectionName, schemaRequest.CollectionName); + + Assert.NotNull(schemaRequest.VectorConfigurations); + Assert.Equal("descriptionEmbedding", schemaRequest.VectorConfigurations.Keys.First()); + + var vectorConfiguration = schemaRequest.VectorConfigurations["descriptionEmbedding"]; + + Assert.Equal("cosine", vectorConfiguration.VectorIndexConfig?.Distance); + Assert.Equal("hnsw", vectorConfiguration.VectorIndexType); + + 
Assert.NotNull(schemaRequest.Properties); + + this.AssertSchemaProperty(schemaRequest.Properties[0], "hotelName", "text", true, false); + this.AssertSchemaProperty(schemaRequest.Properties[1], "hotelCode", "int", false, false); + this.AssertSchemaProperty(schemaRequest.Properties[2], "hotelRating", "number", false, false); + this.AssertSchemaProperty(schemaRequest.Properties[3], "parking_is_included", "boolean", false, false); + this.AssertSchemaProperty(schemaRequest.Properties[4], "tags", "text[]", false, false); + this.AssertSchemaProperty(schemaRequest.Properties[5], "description", "text", false, true); + this.AssertSchemaProperty(schemaRequest.Properties[6], "timestamp", "date", false, false); + } + + [Fact] + public async Task DeleteCollectionSendsValidRequestAsync() + { + // Arrange + const string CollectionName = "Collection"; + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + Assert.Equal("http://default-endpoint/schema/Collection", this._messageHandlerStub.RequestUri?.AbsoluteUri); + Assert.Equal(HttpMethod.Delete, this._messageHandlerStub.Method); + } + + [Fact] + public async Task DeleteSendsValidRequestAsync() + { + // Arrange + const string CollectionName = "Collection"; + var id = new Guid("55555555-5555-5555-5555-555555555555"); + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + + // Act + await sut.DeleteAsync(id); + + // Assert + Assert.Equal("http://default-endpoint/objects/Collection/55555555-5555-5555-5555-555555555555", this._messageHandlerStub.RequestUri?.AbsoluteUri); + Assert.Equal(HttpMethod.Delete, this._messageHandlerStub.Method); + } + + [Fact] + public async Task DeleteBatchUsesValidQueryMatchAsync() + { + // Arrange + const string CollectionName = "Collection"; + List ids = [new Guid("11111111-1111-1111-1111-111111111111"), new Guid("22222222-2222-2222-2222-222222222222")]; + + var sut = 
new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName); + + // Act + await sut.DeleteBatchAsync(ids); + + // Assert + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + + Assert.NotNull(request?.Match); + + Assert.Equal(CollectionName, request.Match.CollectionName); + + Assert.NotNull(request.Match.WhereClause); + + var clause = request.Match.WhereClause; + + Assert.Equal("ContainsAny", clause.Operator); + Assert.Equal(["id"], clause.Path); + Assert.Equal(["11111111-1111-1111-1111-111111111111", "22222222-2222-2222-2222-222222222222"], clause.Values); + } + + [Fact] + public async Task GetExistingRecordReturnsValidRecordAsync() + { + // Arrange + var id = new Guid("55555555-5555-5555-5555-555555555555"); + + var jsonObject = new JsonObject { ["id"] = id.ToString(), ["properties"] = new JsonObject() }; + + jsonObject["properties"]!["hotelName"] = "Test Name"; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(jsonObject)) + }; + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + + // Act + var result = await sut.GetAsync(id); + + // Assert + Assert.NotNull(result); + Assert.Equal(id, result.HotelId); + Assert.Equal("Test Name", result.HotelName); + } + + [Fact] + public async Task GetExistingBatchRecordsReturnsValidRecordsAsync() + { + // Arrange + var id1 = new Guid("11111111-1111-1111-1111-111111111111"); + var id2 = new Guid("22222222-2222-2222-2222-222222222222"); + + var jsonObject1 = new JsonObject { ["id"] = id1.ToString(), ["properties"] = new JsonObject() }; + var jsonObject2 = new JsonObject { ["id"] = id2.ToString(), ["properties"] = new JsonObject() }; + + jsonObject1["properties"]!["hotelName"] = "Test Name 1"; + jsonObject2["properties"]!["hotelName"] = "Test Name 2"; + + using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new 
StringContent(JsonSerializer.Serialize(jsonObject1)) }; + using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(JsonSerializer.Serialize(jsonObject2)) }; + + this._messageHandlerStub.ResponseQueue.Enqueue(response1); + this._messageHandlerStub.ResponseQueue.Enqueue(response2); + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + + // Act + var results = await sut.GetBatchAsync([id1, id2]).ToListAsync(); + + // Assert + Assert.NotNull(results[0]); + Assert.Equal(id1, results[0].HotelId); + Assert.Equal("Test Name 1", results[0].HotelName); + + Assert.NotNull(results[1]); + Assert.Equal(id2, results[1].HotelId); + Assert.Equal("Test Name 2", results[1].HotelName); + } + + [Fact] + public async Task UpsertReturnsRecordKeyAsync() + { + // Arrange + var id = new Guid("11111111-1111-1111-1111-111111111111"); + var hotel = new WeaviateHotel { HotelId = id, HotelName = "Test Name" }; + + var batchResponse = new List { new() { Id = id, Result = new() { Status = "Success" } } }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(batchResponse)), + }; + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal(id, result); + + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + + Assert.NotNull(request?.CollectionObjects); + + var jsonObject = request.CollectionObjects[0]; + + Assert.Equal("11111111-1111-1111-1111-111111111111", jsonObject["id"]?.GetValue()); + Assert.Equal("Test Name", jsonObject["properties"]?["hotelName"]?.GetValue()); + } + + [Fact] + public async Task UpsertReturnsRecordKeysAsync() + { + // Arrange + var id1 = new Guid("11111111-1111-1111-1111-111111111111"); + var id2 = new Guid("22222222-2222-2222-2222-222222222222"); + + var 
hotel1 = new WeaviateHotel { HotelId = id1, HotelName = "Test Name 1" }; + var hotel2 = new WeaviateHotel { HotelId = id2, HotelName = "Test Name 2" }; + + var batchResponse = new List + { + new() { Id = id1, Result = new() { Status = "Success" } }, + new() { Id = id2, Result = new() { Status = "Success" } } + }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(batchResponse)), + }; + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, "Collection"); + + // Act + var results = await sut.UpsertBatchAsync([hotel1, hotel2]).ToListAsync(); + + // Assert + Assert.Contains(id1, results); + Assert.Contains(id2, results); + + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + + Assert.NotNull(request?.CollectionObjects); + + var jsonObject1 = request.CollectionObjects[0]; + var jsonObject2 = request.CollectionObjects[1]; + + Assert.Equal("11111111-1111-1111-1111-111111111111", jsonObject1["id"]?.GetValue()); + Assert.Equal("Test Name 1", jsonObject1["properties"]?["hotelName"]?.GetValue()); + + Assert.Equal("22222222-2222-2222-2222-222222222222", jsonObject2["id"]?.GetValue()); + Assert.Equal("Test Name 2", jsonObject2["properties"]?["hotelName"]?.GetValue()); + } + + [Fact] + public async Task UpsertWithCustomMapperWorksCorrectlyAsync() + { + // Arrange + var id = new Guid("11111111-1111-1111-1111-111111111111"); + var hotel = new WeaviateHotel { HotelId = id, HotelName = "Test Name" }; + + var jsonObject = new JsonObject { ["id"] = id.ToString(), ["properties"] = new JsonObject() }; + + jsonObject["properties"]!["hotel_name"] = "Test Name from Mapper"; + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromDataToStorageModel(It.IsAny())) + .Returns(jsonObject); + + var batchResponse = new List { new() { Id = id, Result = new() { Status = "Success" } } }; + + this._messageHandlerStub.ResponseToReturn 
= new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(batchResponse)), + }; + + var sut = new WeaviateVectorStoreRecordCollection( + this._mockHttpClient, + "Collection", + new() { JsonObjectCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.UpsertAsync(hotel); + + // Assert + Assert.Equal(id, result); + + var request = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent); + + Assert.NotNull(request?.CollectionObjects); + + var requestObject = request.CollectionObjects[0]; + + Assert.Equal("11111111-1111-1111-1111-111111111111", requestObject["id"]?.GetValue()); + Assert.Equal("Test Name from Mapper", requestObject["properties"]?["hotel_name"]?.GetValue()); + } + + [Fact] + public async Task GetWithCustomMapperWorksCorrectlyAsync() + { + // Arrange + var id = new Guid("11111111-1111-1111-1111-111111111111"); + var jsonObject = new JsonObject { ["id"] = id.ToString(), ["properties"] = new JsonObject() }; + + jsonObject["properties"]!["hotelName"] = "Test Name"; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(jsonObject)) + }; + + var mockMapper = new Mock>(); + + mockMapper + .Setup(l => l.MapFromStorageToDataModel(It.IsAny(), It.IsAny())) + .Returns(new WeaviateHotel { HotelId = id, HotelName = "Test Name from mapper" }); + + var sut = new WeaviateVectorStoreRecordCollection( + this._mockHttpClient, + "Collection", + new() { JsonObjectCustomMapper = mockMapper.Object }); + + // Act + var result = await sut.GetAsync(id); + + // Assert + Assert.NotNull(result); + Assert.Equal(id, result.HotelId); + Assert.Equal("Test Name from mapper", result.HotelName); + } + + [Theory] + [InlineData(true, "http://test-endpoint/schema", "Bearer fake-key")] + [InlineData(false, "http://default-endpoint/schema", null)] + public async Task ItUsesHttpClientParametersAsync(bool initializeOptions, 
string expectedEndpoint, string? expectedHeader) + { + // Arrange + const string CollectionName = "Collection"; + + var options = initializeOptions ? + new WeaviateVectorStoreRecordCollectionOptions() { Endpoint = new Uri("http://test-endpoint"), ApiKey = "fake-key" } : + null; + + var sut = new WeaviateVectorStoreRecordCollection(this._mockHttpClient, CollectionName, options); + + // Act + await sut.CreateCollectionAsync(); + + var headers = this._messageHandlerStub.RequestHeaders; + var endpoint = this._messageHandlerStub.RequestUri; + + // Assert + Assert.Equal(expectedEndpoint, endpoint?.AbsoluteUri); + Assert.Equal(expectedHeader, headers?.Authorization?.ToString()); + } + + public void Dispose() + { + this._mockHttpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } + + public static TheoryData CollectionExistsData => new() + { + { new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(JsonSerializer.Serialize(new WeaviateGetCollectionSchemaResponse { CollectionName = "Collection" })) }, true }, + { new HttpResponseMessage(HttpStatusCode.NotFound), false } + }; + + #region private + + private void AssertSchemaProperty( + WeaviateCollectionSchemaProperty property, + string propertyName, + string dataType, + bool indexFilterable, + bool indexSearchable) + { + Assert.NotNull(property); + Assert.Equal(propertyName, property.Name); + Assert.Equal(dataType, property.DataType[0]); + Assert.Equal(indexFilterable, property.IndexFilterable); + Assert.Equal(indexSearchable, property.IndexSearchable); + } + +#pragma warning disable CA1812 + private sealed class TestModel + { + public Guid Id { get; set; } + + public string? 
HotelName { get; set; } + } +#pragma warning restore CA1812 + + #endregion +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs new file mode 100644 index 000000000000..9f8d2c4a0a53 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreRecordMapperTests.cs @@ -0,0 +1,113 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class WeaviateVectorStoreRecordMapperTests +{ + private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = + { + new WeaviateDateTimeOffsetConverter(), + new WeaviateNullableDateTimeOffsetConverter() + } + }; + + private readonly WeaviateVectorStoreRecordMapper _sut; + + public WeaviateVectorStoreRecordMapperTests() + { + var storagePropertyNames = new Dictionary + { + ["HotelId"] = "hotelId", + ["HotelName"] = "hotelName", + ["Tags"] = "tags", + ["DescriptionEmbedding"] = "descriptionEmbedding", + }; + + var dataProperties = new List + { + new("HotelName", typeof(string)), + new("Tags", typeof(List)) + }; + + var vectorProperties = new List + { + new("DescriptionEmbedding", typeof(ReadOnlyMemory)) + }; + + this._sut = new WeaviateVectorStoreRecordMapper( + "CollectionName", + new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), + dataProperties, + vectorProperties, + storagePropertyNames, + s_jsonSerializerOptions); + } + + 
[Fact] + public void MapFromDataToStorageModelReturnsValidObject() + { + // Arrange + var hotel = new WeaviateHotel + { + HotelId = new Guid("55555555-5555-5555-5555-555555555555"), + HotelName = "Test Name", + Tags = ["tag1", "tag2"], + DescriptionEmbedding = new ReadOnlyMemory([1f, 2f, 3f]) + }; + + // Act + var document = this._sut.MapFromDataToStorageModel(hotel); + + // Assert + Assert.NotNull(document); + + Assert.Equal("55555555-5555-5555-5555-555555555555", document["id"]!.GetValue()); + Assert.Equal("Test Name", document["properties"]!["hotelName"]!.GetValue()); + Assert.Equal(["tag1", "tag2"], document["properties"]!["tags"]!.AsArray().Select(l => l!.GetValue())); + Assert.Equal([1f, 2f, 3f], document["vectors"]!["descriptionEmbedding"]!.AsArray().Select(l => l!.GetValue())); + } + + [Fact] + public void MapFromStorageToDataModelReturnsValidObject() + { + // Arrange + var document = new JsonObject + { + ["id"] = "55555555-5555-5555-5555-555555555555", + ["properties"] = new JsonObject(), + ["vectors"] = new JsonObject() + }; + + document["properties"]!["hotelName"] = "Test Name"; + document["properties"]!["tags"] = new JsonArray(new List { "tag1", "tag2" }.Select(l => JsonValue.Create(l)).ToArray()); + document["vectors"]!["descriptionEmbedding"] = new JsonArray(new List { 1f, 2f, 3f }.Select(l => JsonValue.Create(l)).ToArray()); + + // Act + var hotel = this._sut.MapFromStorageToDataModel(document, new() { IncludeVectors = true }); + + // Assert + Assert.NotNull(hotel); + + Assert.Equal(new Guid("55555555-5555-5555-5555-555555555555"), hotel.HotelId); + Assert.Equal("Test Name", hotel.HotelName); + Assert.Equal(["tag1", "tag2"], hotel.Tags); + Assert.True(new ReadOnlyMemory([1f, 2f, 3f]).Span.SequenceEqual(hotel.DescriptionEmbedding!.Value.Span)); + } +} diff --git a/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs new file mode 100644 index 
000000000000..012d9c7b2369 --- /dev/null +++ b/dotnet/src/Connectors/Connectors.Weaviate.UnitTests/WeaviateVectorStoreTests.cs @@ -0,0 +1,111 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Moq; +using Xunit; + +namespace SemanticKernel.Connectors.Weaviate.UnitTests; + +/// +/// Unit tests for class. +/// +public sealed class WeaviateVectorStoreTests : IDisposable +{ + private readonly HttpMessageHandlerStub _messageHandlerStub = new(); + private readonly HttpClient _mockHttpClient; + + public WeaviateVectorStoreTests() + { + this._mockHttpClient = new(this._messageHandlerStub, false) { BaseAddress = new Uri("http://test") }; + } + + [Fact] + public void GetCollectionWithNotSupportedKeyThrowsException() + { + // Arrange + var sut = new WeaviateVectorStore(this._mockHttpClient); + + // Act & Assert + Assert.Throws(() => sut.GetCollection("collection")); + } + + [Fact] + public void GetCollectionWithSupportedKeyReturnsCollection() + { + // Arrange + var sut = new WeaviateVectorStore(this._mockHttpClient); + + // Act + var collection = sut.GetCollection("collection1"); + + // Assert + Assert.NotNull(collection); + } + + [Fact] + public void GetCollectionWithFactoryReturnsCustomCollection() + { + // Arrange + var mockFactory = new Mock(); + var mockRecordCollection = new Mock>(); + + mockFactory + .Setup(l => l.CreateVectorStoreRecordCollection( + this._mockHttpClient, + "collection", + It.IsAny())) + .Returns(mockRecordCollection.Object); + + var sut = new WeaviateVectorStore( + this._mockHttpClient, + new WeaviateVectorStoreOptions { VectorStoreCollectionFactory = mockFactory.Object }); + + // Act + var collection = sut.GetCollection("collection"); + + // Assert + 
Assert.Same(mockRecordCollection.Object, collection); + mockFactory.Verify(l => l.CreateVectorStoreRecordCollection( + this._mockHttpClient, + "collection", + It.IsAny()), Times.Once()); + } + + [Fact] + public async Task ListCollectionNamesReturnsCollectionNamesAsync() + { + // Arrange + var expectedCollectionNames = new List { "Collection1", "Collection2", "Collection3" }; + var response = new WeaviateGetCollectionsResponse + { + Collections = expectedCollectionNames.Select(name => new WeaviateCollectionSchema(name)).ToList() + }; + + this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(response)) + }; + + var sut = new WeaviateVectorStore(this._mockHttpClient); + + // Act + var actualCollectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Equal(expectedCollectionNames, actualCollectionNames); + } + + public void Dispose() + { + this._mockHttpClient.Dispose(); + this._messageHandlerStub.Dispose(); + } +} diff --git a/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs b/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs deleted file mode 100644 index e08d1c9b4415..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/ChatCompletionAgentTests.cs +++ /dev/null @@ -1,95 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Experimental.Agents; -using Moq; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; -public class ChatCompletionAgentTests -{ - private readonly IKernelBuilder _kernelBuilder; - - public ChatCompletionAgentTests() - { - this._kernelBuilder = Kernel.CreateBuilder(); - } - - [Fact] - public async Task ItShouldResolveChatCompletionServiceFromKernelAsync() - { - // Arrange - var mockChatCompletionService = new Mock(); - - this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); - - var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); - - // Act - var result = await agent.InvokeAsync([]); - - // Assert - mockChatCompletionService.Verify(x => - x.GetChatMessageContentsAsync( - It.IsAny(), - It.IsAny(), - It.IsAny(), - It.IsAny()), - Times.Once); - } - - [Fact] - public async Task ItShouldAddSystemInstructionsAndMessagesToChatHistoryAsync() - { - // Arrange - var mockChatCompletionService = new Mock(); - - this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); - - var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); - - // Act - var result = await agent.InvokeAsync([new(AuthorRole.User, "fake-user-message")]); - - // Assert - mockChatCompletionService.Verify( - x => x.GetChatMessageContentsAsync( - It.Is(ch => ch.Count == 2 && - ch.Any(m => m.Role == AuthorRole.System && m.Content == "fake-instructions") && - ch.Any(m => m.Role == AuthorRole.User && m.Content == "fake-user-message")), - It.IsAny(), - It.IsAny(), - It.IsAny()), - Times.Once); - } - - [Fact] - public async Task ItShouldReturnChatCompletionServiceMessagesAsync() - { - // Arrange - var mockChatCompletionService = new Mock(); - 
mockChatCompletionService - .Setup(ccs => ccs.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .ReturnsAsync([ - new(AuthorRole.Assistant, "fake-assistant-message-1"), - new(AuthorRole.Assistant, "fake-assistant-message-2") - ]); - - this._kernelBuilder.Services.AddSingleton(mockChatCompletionService.Object); - - var agent = new ChatCompletionAgent(this._kernelBuilder.Build(), "fake-instructions"); - - // Act - var result = await agent.InvokeAsync([]); - - // Assert - Assert.Equal(2, result.Count); - Assert.Contains(result, m => m.Role == AuthorRole.Assistant && m.Content == "fake-assistant-message-1"); - Assert.Contains(result, m => m.Role == AuthorRole.Assistant && m.Content == "fake-assistant-message-2"); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj b/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj deleted file mode 100644 index 8d29367fae3b..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Experimental.Agents.UnitTests.csproj +++ /dev/null @@ -1,48 +0,0 @@ -๏ปฟ - - SemanticKernel.Experimental.Agents.UnitTests - SemanticKernel.Experimental.Agents.UnitTests - net8.0 - true - enable - disable - false - $(NoWarn);CS1591;SKEXP0101 - - - - - - - - - - - - - - - - - - all - - - all - - - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - PreserveNewest - - - \ No newline at end of file diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs deleted file mode 100644 index fc900c13f932..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelExtensionTests.cs +++ /dev/null @@ -1,44 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Extensions; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class KernelExtensionTests -{ - private const string TwoPartToolName = "Fake-Bogus"; - - [Fact] - public static void InvokeTwoPartTool() - { - //Arrange - var function = KernelFunctionFactory.CreateFromMethod(() => { }, functionName: "Bogus"); - - var kernel = new Kernel(); - kernel.ImportPluginFromFunctions("Fake", [function]); - - //Act - var tool = kernel.GetAssistantTool(TwoPartToolName); - - //Assert - Assert.NotNull(tool); - Assert.Equal("Bogus", tool.Name); - } - - [Theory] - [InlineData("Bogus")] - [InlineData("i-am-not-valid")] - public static void InvokeInvalidSinglePartTool(string toolName) - { - //Arrange - var kernel = new Kernel(); - - //Act & Assert - Assert.Throws(() => kernel.GetAssistantTool(toolName)); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs deleted file mode 100644 index b69aead79981..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/KernelFunctionExtensionTests.cs +++ /dev/null @@ -1,52 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class KernelFunctionExtensionTests -{ - private const string ToolName = "Bogus"; - private const string PluginName = "Fake"; - - [Fact] - public static void GetTwoPartName() - { - var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName); - - string qualifiedName = function.GetQualifiedName(PluginName); - - Assert.Equal($"{PluginName}-{ToolName}", qualifiedName); - } - - [Fact] - public static void GetToolModelFromFunction() - { - const string FunctionDescription = "Bogus description"; - const string RequiredParamName = "required"; - const string OptionalParamName = "optional"; - - var requiredParam = new KernelParameterMetadata("required") { IsRequired = true }; - var optionalParam = new KernelParameterMetadata("optional"); - var parameters = new List { requiredParam, optionalParam }; - var function = KernelFunctionFactory.CreateFromMethod(() => true, ToolName, FunctionDescription, parameters); - - var toolModel = function.ToToolModel(PluginName); - var properties = toolModel.Function?.Parameters.Properties; - var required = toolModel.Function?.Parameters.Required; - - Assert.Equal("function", toolModel.Type); - Assert.Equal($"{PluginName}-{ToolName}", toolModel.Function?.Name); - Assert.Equal(FunctionDescription, toolModel.Function?.Description); - Assert.Equal(2, properties?.Count); - Assert.True(properties?.ContainsKey(RequiredParamName)); - Assert.True(properties?.ContainsKey(OptionalParamName)); - Assert.Equal(1, required?.Count ?? 0); - Assert.True(required?.Contains(RequiredParamName) ?? 
false); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs deleted file mode 100644 index c6773cea232f..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.AssistantTests.cs +++ /dev/null @@ -1,68 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class OpenAIRestExtensionsAssistantsTests -{ - private const string BogusEndpoint = "http://localhost"; - private const string BogusApiKey = "bogus"; - private const string TestAgentId = "agentId"; - - private readonly AssistantModel _assistantModel = new(); - private readonly OpenAIRestContext _restContext; - private readonly Mock _mockHttpMessageHandler = new(); - - public OpenAIRestExtensionsAssistantsTests() - { - this._mockHttpMessageHandler - .Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); - this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); - } - - [Fact] - public async Task CreateAssistantModelAsync() - { - await this._restContext.CreateAssistantModelAsync(this._assistantModel).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetAssistantsUrl()); - } - - [Fact] - public async Task GetAssistantModelAsync() - { - await 
this._restContext.GetAssistantModelAsync(TestAgentId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetAssistantUrl(TestAgentId)); - } - - [Fact] - public async Task ListAssistantModelsAsync() - { - await this._restContext.ListAssistantModelsAsync(10, false, "20").ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, $"{this._restContext.GetAssistantsUrl()}?limit=10&order=desc&after=20"); - } - - [Fact] - public async Task DeleteAssistantsModelAsync() - { - await this._restContext.DeleteAssistantModelAsync(TestAgentId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, this._restContext.GetAssistantUrl(TestAgentId)); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs deleted file mode 100644 index 86d44b5cf2a4..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.MessagesTests.cs +++ /dev/null @@ -1,70 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class OpenAIRestExtensionsMessagesTests -{ - private const string BogusEndpoint = "http://localhost"; - private const string BogusApiKey = "bogus"; - private const string TestThreadId = "threadId"; - private const string TestMessageId = "msgId"; - private const string TestContent = "Blah blah"; - - private readonly OpenAIRestContext _restContext; - private readonly Mock _mockHttpMessageHandler = new(); - - public OpenAIRestExtensionsMessagesTests() - { - this._mockHttpMessageHandler - .Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); - this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); - } - - [Fact] - public async Task CreateMessageModelAsync() - { - await this._restContext.CreateUserTextMessageAsync(TestThreadId, TestContent, fileIds: null).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetMessagesUrl(TestThreadId)); - } - - [Fact] - public async Task GetMessageModelAsync() - { - await this._restContext.GetMessageAsync(TestThreadId, TestMessageId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetMessagesUrl(TestThreadId, TestMessageId)); - } - - [Fact] - public async Task GetMessageModelsAsync() - { - await this._restContext.GetMessagesAsync(TestThreadId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, 
this._restContext.GetMessagesUrl(TestThreadId)); - } - - [Fact] - public async Task GetSpecificMessageModelsAsync() - { - var messageIDs = new string[] { "1", "2", "3" }; - - await this._restContext.GetMessagesAsync(TestThreadId, messageIDs).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, messageIDs.Length); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs deleted file mode 100644 index 4dcc85cf4b68..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.RunTests.cs +++ /dev/null @@ -1,72 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class OpenAIRestExtensionsRunTests -{ - private const string BogusEndpoint = "http://localhost"; - private const string BogusApiKey = "bogus"; - private const string TestAgentId = "agentId"; - private const string TestThreadId = "threadId"; - private const string TestRunId = "runId"; - - private readonly OpenAIRestContext _restContext; - private readonly Mock _mockHttpMessageHandler = new(); - - public OpenAIRestExtensionsRunTests() - { - this._mockHttpMessageHandler - .Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); - this._restContext = new(BogusEndpoint, BogusApiKey, () => new 
HttpClient(this._mockHttpMessageHandler.Object)); - } - - [Fact] - public async Task CreateRunAsync() - { - await this._restContext.CreateRunAsync(TestThreadId, TestAgentId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetRunsUrl(TestThreadId)); - } - - [Fact] - public async Task GetRunAsync() - { - await this._restContext.GetRunAsync(TestThreadId, TestRunId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetRunUrl(TestThreadId, TestRunId)); - } - - [Fact] - public async Task GetRunStepsAsync() - { - await this._restContext.GetRunStepsAsync(TestThreadId, TestRunId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetRunStepsUrl(TestThreadId, TestRunId)); - } - - [Fact] - public async Task AddToolOutputsAsync() - { - var toolResults = Array.Empty(); - - await this._restContext.AddToolOutputsAsync(TestThreadId, TestRunId, toolResults).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetRunToolOutputUrl(TestThreadId, TestRunId)); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs b/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs deleted file mode 100644 index 0738cd85bfc2..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Extensions/OpenAIRestExtensions.ThreadTests.cs +++ /dev/null @@ -1,58 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Moq; -using Moq.Protected; -using Xunit; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -[Trait("Category", "Unit Tests")] -[Trait("Feature", "Agent")] -public sealed class OpenAIRestExtensionsThreadTests -{ - private const string BogusEndpoint = "http://localhost"; - private const string BogusApiKey = "bogus"; - private const string TestThreadId = "threadId"; - - private readonly OpenAIRestContext _restContext; - private readonly Mock _mockHttpMessageHandler = new(); - - public OpenAIRestExtensionsThreadTests() - { - this._mockHttpMessageHandler - .Protected() - .Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) - .ReturnsAsync(() => new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent("{}") }); - this._restContext = new(BogusEndpoint, BogusApiKey, () => new HttpClient(this._mockHttpMessageHandler.Object)); - } - - [Fact] - public async Task CreateThreadModelAsync() - { - await this._restContext.CreateThreadModelAsync().ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Post, 1, this._restContext.GetThreadsUrl()); - } - - [Fact] - public async Task GetThreadModelAsync() - { - await this._restContext.GetThreadModelAsync(TestThreadId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Get, 1, this._restContext.GetThreadUrl(TestThreadId)); - } - - [Fact] - public async Task DeleteThreadModelAsync() - { - await this._restContext.DeleteThreadModelAsync(TestThreadId).ConfigureAwait(true); - - this._mockHttpMessageHandler.VerifyMock(HttpMethod.Delete, 1, this._restContext.GetThreadUrl(TestThreadId)); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs deleted 
file mode 100644 index 6513b1edfa25..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Integration/AgentHarness.cs +++ /dev/null @@ -1,141 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -#define DISABLEHOST // Comment line to enable -using System; -using System.Collections.Generic; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; - -/// -/// Dev harness for manipulating agents. -/// -/// -/// Comment out DISABLEHOST definition to enable tests. -/// Not enabled by default. -/// -[Trait("Category", "Integration Tests")] -[Trait("Feature", "Agent")] -public sealed class AgentHarness(ITestOutputHelper output) -{ - private const string SkipReason = -#if DISABLEHOST - "Harness only for local/dev environment"; -#else - null; -#endif - - private readonly ITestOutputHelper _output = output; - - /// - /// Verify creation and retrieval of agent. - /// - [Fact(Skip = SkipReason)] - public async Task VerifyAgentLifecycleAsync() - { - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .WithInstructions("say something funny") - .WithName("Fred") - .WithDescription("test agent") - .BuildAsync().ConfigureAwait(true); - - this.DumpAgent(agent); - - var copy = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .GetAsync(agentId: agent.Id).ConfigureAwait(true); - - this.DumpAgent(copy); - } - - /// - /// Verify creation and retrieval of agent. 
- /// - [Fact(Skip = SkipReason)] - public async Task VerifyAgentDefinitionAsync() - { - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .FromTemplatePath("Templates/PoetAgent.yaml") - .BuildAsync() - .ConfigureAwait(true); - - this.DumpAgent(agent); - - var copy = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .GetAsync(agentId: agent.Id).ConfigureAwait(true); - - this.DumpAgent(copy); - } - - /// - /// Verify creation and retrieval of agent. - /// - [Fact(Skip = SkipReason)] - public async Task VerifyAgentListAsync() - { - var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); - var agents = await context.ListAssistantModelsAsync().ConfigureAwait(true); - foreach (var agent in agents) - { - this.DumpAgent(agent); - } - } - - /// - /// Verify creation and retrieval of agent. - /// - [Fact(Skip = SkipReason)] - public async Task VerifyAgentDeleteAsync() - { - var names = - new HashSet(StringComparer.OrdinalIgnoreCase) - { - "Fred", - "Barney", - "DeleteMe", - "Poet", - "Math Tutor", - }; - - var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); - var agents = await context.ListAssistantModelsAsync().ConfigureAwait(true); - foreach (var agent in agents) - { - if (!string.IsNullOrWhiteSpace(agent.Name) && names.Contains(agent.Name)) - { - this._output.WriteLine($"Removing: {agent.Name} - {agent.Id}"); - await context.DeleteAssistantModelAsync(agent.Id).ConfigureAwait(true); - } - } - } - - private void DumpAgent(AssistantModel agent) - { - this._output.WriteLine($"# {agent.Id}"); - this._output.WriteLine($"# {agent.Model}"); - this._output.WriteLine($"# {agent.Instructions}"); - this._output.WriteLine($"# {agent.Name}"); - this._output.WriteLine($"# {agent.Description}{Environment.NewLine}"); - } - - private void DumpAgent(IAgent agent) - { - 
this._output.WriteLine($"# {agent.Id}"); - this._output.WriteLine($"# {agent.Model}"); - this._output.WriteLine($"# {agent.Instructions}"); - this._output.WriteLine($"# {agent.Name}"); - this._output.WriteLine($"# {agent.Description}{Environment.NewLine}"); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs deleted file mode 100644 index 0326b059f821..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Integration/RunHarness.cs +++ /dev/null @@ -1,149 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -#define DISABLEHOST // Comment line to enable -using System.Collections.Generic; -using System.ComponentModel; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents; -using Xunit; -using Xunit.Abstractions; - -#pragma warning disable CA1812 // Uninstantiated internal types - -namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; - -/// -/// Dev harness for manipulating runs. -/// -/// -/// Comment out DISABLEHOST definition to enable tests. -/// Not enabled by default. -/// -[Trait("Category", "Integration Tests")] -[Trait("Feature", "Agent")] -public sealed class RunHarness(ITestOutputHelper output) -{ -#if DISABLEHOST - private const string SkipReason = "Harness only for local/dev environment"; -#else - private const string SkipReason = null; -#endif - - private readonly ITestOutputHelper _output = output; - - /// - /// Verify creation of run. 
- /// - [Fact(Skip = SkipReason)] - public async Task VerifyRunLifecycleAsync() - { - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .WithInstructions("say something funny") - .WithName("Fred") - .WithDescription("funny agent") - .BuildAsync().ConfigureAwait(true); - - var thread = await agent.NewThreadAsync().ConfigureAwait(true); - - await this.ChatAsync( - thread, - agent, - "I was on my way to the store this morning and...", - "That was great! Tell me another.").ConfigureAwait(true); - } - - /// - /// Verify creation of run. - /// - [Fact(Skip = SkipReason)] - public async Task VerifyRunFromDefinitionAsync() - { - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .FromTemplatePath("Templates/PoetAgent.yaml") - .BuildAsync() - .ConfigureAwait(true); - - var thread = await agent.NewThreadAsync().ConfigureAwait(true); - - await this.ChatAsync( - thread, - agent, - "Eggs are yummy and beautiful geometric gems.", - "It rains a lot in Seattle.").ConfigureAwait(true); - } - - /// - /// Verify creation of run. 
- /// - [Fact(Skip = SkipReason)] - public async Task VerifyFunctionLifecycleAsync() - { - var gamePlugin = KernelPluginFactory.CreateFromType(); - - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .FromTemplatePath("Templates/GameAgent.yaml") - .WithPlugin(gamePlugin) - .BuildAsync() - .ConfigureAwait(true); - - var thread = await agent.NewThreadAsync().ConfigureAwait(true); - - await this.ChatAsync( - thread, - agent, - "What is the question for the guessing game?", - "Is it 'RED'?", - "What is the answer?").ConfigureAwait(true); - } - - private async Task ChatAsync(IAgentThread thread, IAgent agent, params string[] messages) - { - foreach (var message in messages) - { - var messageUser = await thread.AddUserMessageAsync(message).ConfigureAwait(true); - this.LogMessage(messageUser); - - var agentMessages = await thread.InvokeAsync(agent).ToArrayAsync().ConfigureAwait(true); - this.LogMessages(agentMessages); - } - } - - private void LogMessages(IEnumerable messages) - { - foreach (var message in messages) - { - this.LogMessage(message); - } - } - - private void LogMessage(IChatMessage message) - { - this._output.WriteLine($"# {message.Id}"); - this._output.WriteLine($"# {message.Content}"); - this._output.WriteLine($"# {message.Role}"); - this._output.WriteLine($"# {message.AgentId}"); - } - - private sealed class GuessingGame - { - /// - /// Get the question - /// - [KernelFunction, Description("Get the guessing game question")] - public string GetQuestion() => "What color am I thinking of?"; - - /// - /// Get the answer - /// - [KernelFunction, Description("Get the answer to the guessing game question.")] - public string GetAnswer() => "Blue"; - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs b/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs deleted file mode 100644 index c1629a1c301d..000000000000 --- 
a/dotnet/src/Experimental/Agents.UnitTests/Integration/ThreadHarness.cs +++ /dev/null @@ -1,91 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -#define DISABLEHOST // Comment line to enable -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Experimental.Agents; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.Experimental.Agents.UnitTests.Integration; - -/// -/// Dev harness for manipulating threads. -/// -/// -/// Comment out DISABLEHOST definition to enable tests. -/// Not enabled by default. -/// -[Trait("Category", "Integration Tests")] -[Trait("Feature", "Agent")] -public sealed class ThreadHarness(ITestOutputHelper output) -{ -#if DISABLEHOST - private const string SkipReason = "Harness only for local/dev environment"; -#else - private const string SkipReason = null; -#endif - - private readonly ITestOutputHelper _output = output; - - /// - /// Verify creation and retrieval of thread. 
- /// - [Fact(Skip = SkipReason)] - public async Task VerifyThreadLifecycleAsync() - { - var agent = - await new AgentBuilder() - .WithOpenAIChatCompletion(TestConfig.SupportedGpt35TurboModel, TestConfig.OpenAIApiKey) - .WithName("DeleteMe") - .BuildAsync() - .ConfigureAwait(true); - - var thread = await agent.NewThreadAsync().ConfigureAwait(true); - - Assert.NotNull(thread.Id); - - this._output.WriteLine($"# {thread.Id}"); - - var message = await thread.AddUserMessageAsync("I'm so confused!").ConfigureAwait(true); - Assert.NotNull(message); - - this._output.WriteLine($"# {message.Id}"); - - var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); - var copy = await context.GetThreadModelAsync(thread.Id).ConfigureAwait(true); - - await context.DeleteThreadModelAsync(thread.Id).ConfigureAwait(true); - - await Assert.ThrowsAsync(() => context.GetThreadModelAsync(thread.Id)).ConfigureAwait(true); - } - - /// - /// Verify retrieval of thread messages - /// - [Fact(Skip = SkipReason)] - public async Task GetThreadAsync() - { - var threadId = ""; - - var context = new OpenAIRestContext(AgentBuilder.OpenAIBaseUrl, TestConfig.OpenAIApiKey); - var thread = await ChatThread.GetAsync(context, threadId); - - int index = 0; - string? messageId = null; - while (messageId is not null || index == 0) - { - var messages = await thread.GetMessagesAsync(count: 100, lastMessageId: messageId).ConfigureAwait(true); - foreach (var message in messages) - { - ++index; - this._output.WriteLine($"#{index:000} [{message.Id}] {message.Role} [{message.AgentId ?? "n/a"}]"); - - this._output.WriteLine(message.Content); - } - - messageId = messages.Count > 0 ? 
messages[messages.Count - 1].Id : null; - } - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs b/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs deleted file mode 100644 index b3a8add2e6b7..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/MockExtensions.cs +++ /dev/null @@ -1,20 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Net.Http; -using System.Threading; -using Moq; -using Moq.Protected; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -internal static class MockExtensions -{ - public static void VerifyMock(this Mock mockHandler, HttpMethod method, int times, string? uri = null) - { - mockHandler.Protected().Verify( - "SendAsync", - Times.Exactly(times), - ItExpr.Is(req => req.Method == method && (uri == null || req.RequestUri!.AbsoluteUri.StartsWith(uri))), - ItExpr.IsAny()); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml b/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml deleted file mode 100644 index 1f548b665839..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Templates/GameAgent.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: Fred -template: | - Run a guessing game where the user tries to guess the answer to a question but don't tell them the answer unless they give up by asking for the answer. - diff --git a/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml b/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml deleted file mode 100644 index 6bcec526ee73..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/Templates/PoetAgent.yaml +++ /dev/null @@ -1,4 +0,0 @@ -name: Poet -template: | - Compose a sonnet inspired by the user input. -description: You are a poet that composes poems based on user input. 
diff --git a/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs b/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs deleted file mode 100644 index e11087c02285..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/TestConfig.cs +++ /dev/null @@ -1,29 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Reflection; -using Microsoft.Extensions.Configuration; -using Xunit.Sdk; - -namespace SemanticKernel.Experimental.Agents.UnitTests; - -internal static class TestConfig -{ - public const string SupportedGpt35TurboModel = "gpt-3.5-turbo-1106"; - - public static IConfiguration Configuration { get; } = CreateConfiguration(); - - public static string OpenAIApiKey => - TestConfig.Configuration.GetValue("OpenAIApiKey") ?? - throw new TestClassException("Missing OpenAI APIKey."); - - private static IConfiguration CreateConfiguration() - { - return - new ConfigurationBuilder() - .AddEnvironmentVariables() - .AddJsonFile("testsettings.json") - .AddJsonFile("testsettings.development.json", optional: true) - .AddUserSecrets(Assembly.GetExecutingAssembly()) - .Build(); - } -} diff --git a/dotnet/src/Experimental/Agents.UnitTests/testsettings.json b/dotnet/src/Experimental/Agents.UnitTests/testsettings.json deleted file mode 100644 index d456a389e0f9..000000000000 --- a/dotnet/src/Experimental/Agents.UnitTests/testsettings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "OpenAIApiKey": "" -} diff --git a/dotnet/src/Experimental/Agents/AgentBuilder.cs b/dotnet/src/Experimental/Agents/AgentBuilder.cs deleted file mode 100644 index 53e5661402fd..000000000000 --- a/dotnet/src/Experimental/Agents/AgentBuilder.cs +++ /dev/null @@ -1,359 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Fluent builder for initializing an instance. -/// -public partial class AgentBuilder -{ - internal const string OpenAIBaseUrl = "https://api.openai.com/v1"; - - private readonly AssistantModel _model; - private readonly KernelPluginCollection _plugins; - private readonly HashSet _tools; - private readonly List _fileIds; - private string? _apiKey; - private string? _endpoint; - private string? _version; - private Func? _httpClientProvider; - private PromptTemplateConfig? _config; - - /// - /// Initializes a new instance of the class. - /// - public AgentBuilder() - { - this._model = new AssistantModel(); - this._plugins = []; - this._tools = new HashSet(StringComparer.OrdinalIgnoreCase); - this._fileIds = []; - } - - /// - /// Create a instance. - /// - /// A cancellation token - /// A new instance. 
- public async Task BuildAsync(CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(this._model.Model)) - { - throw new AgentException("Model must be defined for agent."); - } - - if (string.IsNullOrWhiteSpace(this._apiKey)) - { - throw new AgentException("ApiKey must be provided for agent."); - } - - if (string.IsNullOrWhiteSpace(this._endpoint)) - { - throw new AgentException("Endpoint must be provided for agent."); - } - - this._model.Tools.AddRange(this._tools.Select(t => new ToolModel { Type = t })); - this._model.FileIds.AddRange(this._fileIds.Distinct(StringComparer.OrdinalIgnoreCase)); - - return - await Agent.CreateAsync( - new OpenAIRestContext(this._endpoint!, this._apiKey!, this._version, this._httpClientProvider), - this._model, - this._config, - this._plugins, - cancellationToken).ConfigureAwait(false); - } - - /// - /// Create a instance. - /// - /// The agent id to retrieve - /// A cancellation token - /// A new instance. - public async Task GetAsync(string agentId, CancellationToken cancellationToken = default) - { - Verify.NotNull(agentId, nameof(agentId)); - - if (string.IsNullOrWhiteSpace(this._apiKey)) - { - throw new AgentException("ApiKey must be provided for agent."); - } - - if (string.IsNullOrWhiteSpace(this._endpoint)) - { - throw new AgentException("Endpoint must be provided for agent."); - } - - var restContext = new OpenAIRestContext(this._endpoint!, this._apiKey!, this._version, this._httpClientProvider); - var model = await restContext.GetAssistantModelAsync(agentId, cancellationToken).ConfigureAwait(false); - - return new Agent(model, this._config, restContext, this._plugins); - } - - /// - /// Define the OpenAI chat completion service (required). - /// - /// instance for fluid expression. - public AgentBuilder WithAzureOpenAIChatCompletion(string endpoint, string model, string apiKey, string? 
version = null) - { - this._apiKey = apiKey; - this._model.Model = model; - this._endpoint = $"{endpoint}/openai"; - this._version = version ?? "2024-02-15-preview"; - - return this; - } - - /// - /// Define the OpenAI chat completion service (required). - /// - /// instance for fluid expression. - public AgentBuilder WithOpenAIChatCompletion(string model, string apiKey) - { - this._apiKey = apiKey; - this._model.Model = model; - this._endpoint = OpenAIBaseUrl; - - return this; - } - - /// - /// Create a new agent from a yaml formatted string. - /// - /// YAML agent definition. - /// instance for fluid expression. - public AgentBuilder FromTemplate(string template) - { - this._config = KernelFunctionYaml.ToPromptTemplateConfig(template); - - this.WithInstructions(this._config.Template.Trim()); - - if (!string.IsNullOrWhiteSpace(this._config.Name)) - { - this.WithName(this._config.Name?.Trim()); - } - - if (!string.IsNullOrWhiteSpace(this._config.Description)) - { - this.WithDescription(this._config.Description?.Trim()); - } - - return this; - } - - /// - /// Create a new agent from a yaml template. - /// - /// Path to a configuration file. - /// instance for fluid expression. - public AgentBuilder FromTemplatePath(string templatePath) - { - var yamlContent = File.ReadAllText(templatePath); - - return this.FromTemplate(yamlContent); - } - - /// - /// Provide an httpclient (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithHttpClient(HttpClient httpClient) - { - this._httpClientProvider ??= () => httpClient; - - return this; - } - - /// - /// Define the agent description (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithDescription(string? description) - { - this._model.Description = description; - - return this; - } - - /// - /// Define the agent instructions (optional). - /// - /// instance for fluid expression. 
- public AgentBuilder WithInstructions(string instructions) - { - this._model.Instructions = instructions; - - return this; - } - - /// - /// Define the agent metadata (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithMetadata(string key, object value) - { - this._model.Metadata[key] = value; - - return this; - } - - /// - /// Define the agent metadata (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithMetadata(IDictionary metadata) - { - foreach (var kvp in metadata) - { - this._model.Metadata[kvp.Key] = kvp.Value; - } - - return this; - } - - /// - /// Define the agent name (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithName(string? name) - { - this._model.Name = name; - - return this; - } - - /// - /// Enable the code-interpreter tool with this agent. - /// - /// instance for fluid expression. - public AgentBuilder WithCodeInterpreter() - { - this._tools.Add(Agent.ToolCodeInterpreter); - - return this; - } - - /// - /// Enable the retrieval tool with this agent. - /// - /// Optional set of uploaded file identifiers. - /// instance for fluid expression. - public AgentBuilder WithRetrieval(params string[] fileIds) - { - this._tools.Add(Agent.ToolRetrieval); - - return this.WithFiles(fileIds); - } - - /// - /// Define functions associated with agent instance (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithPlugin(KernelPlugin? plugin) - { - if (plugin is not null) - { - this._plugins.Add(plugin); - } - - return this; - } - - /// - /// Define functions associated with agent instance (optional). - /// - /// instance for fluid expression. - public AgentBuilder WithPlugins(IEnumerable plugins) - { - this._plugins.AddRange(plugins); - - return this; - } - - /// - /// Associate an uploaded file with the agent, by identifier. - /// - /// The uploaded file identifier. - /// instance for fluid expression. 
- public AgentBuilder WithFile(string fileId) - { - if (!string.IsNullOrWhiteSpace(fileId)) - { - this._fileIds.Add(fileId); - } - - return this; - } - - /// - /// Associate uploaded files with the agent, by identifier. - /// - /// The uploaded file identifiers. - /// instance for fluid expression. - public AgentBuilder WithFiles(params string[] fileIds) - { - if (fileIds.Length > 0) - { - this._fileIds.AddRange(fileIds); - } - - return this; - } - - /// - /// Retrieve defined agents from an Azure OpenAI endpoint. - /// - /// - /// The can be used to retrieve a hydrated agent via / - /// - public static async Task> GetAzureOpenAIAgentsAsync(string endpoint, string apiKey, string? version = null) - { - endpoint = $"{endpoint}/openai"; - version ??= "2024-02-15-preview"; - - var context = new OpenAIRestContext(endpoint!, apiKey, version); - var result = await context.ListAssistantModelsAsync().ConfigureAwait(false); - - return - result.Select( - m => - new AgentReference() - { - Id = m.Id, - Name = m.Name - }).ToArray(); - } - - /// - /// Retrieve defined agents from OpenAI services. - /// - /// - /// The can be used to retrieve a hydrated agent via / - /// - public static async Task> GetOpenAIAgentsAsync(string apiKey) - { - var context = new OpenAIRestContext(OpenAIBaseUrl, apiKey); - - var result = await context.ListAssistantModelsAsync().ConfigureAwait(false); - - return - result.Select( - m => - new AgentReference() - { - Id = m.Id, - Name = m.Name - }).ToArray(); - } -} diff --git a/dotnet/src/Experimental/Agents/AgentCapability.cs b/dotnet/src/Experimental/Agents/AgentCapability.cs deleted file mode 100644 index 66c9a815bc53..000000000000 --- a/dotnet/src/Experimental/Agents/AgentCapability.cs +++ /dev/null @@ -1,32 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Description of agent capabilities. 
-/// -[Flags] -public enum AgentCapability -{ - /// - /// No additional capabilities. - /// - None = 0, - - /// - /// Has function / plugin capability. - /// - Functions, - - /// - /// Has document / data retrieval capability. - /// - Retrieval, - - /// - /// Has code-interpereter capability. - /// - CodeInterpreter, -} diff --git a/dotnet/src/Experimental/Agents/AgentPlugin.cs b/dotnet/src/Experimental/Agents/AgentPlugin.cs deleted file mode 100644 index 1c8d4acc9859..000000000000 --- a/dotnet/src/Experimental/Agents/AgentPlugin.cs +++ /dev/null @@ -1,53 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Specialization of for -/// -public abstract class AgentPlugin : KernelPlugin -{ - /// - protected AgentPlugin(string name, string? description = null) - : base(name, description) - { - // No specialization... - } - - internal abstract Agent Agent { get; } - - /// - /// Invoke plugin with user input - /// - /// The user input - /// A cancel token - /// The agent response - public async Task InvokeAsync(string input, CancellationToken cancellationToken = default) - { - return await this.InvokeAsync(input, arguments: null, cancellationToken).ConfigureAwait(false); - } - - /// - /// Invoke plugin with user input - /// - /// The user input - /// The arguments - /// A cancel token - /// The agent response - public async Task InvokeAsync(string input, KernelArguments? 
arguments, CancellationToken cancellationToken = default) - { - arguments ??= []; - - arguments["input"] = input; - - var result = await this.First().InvokeAsync(this.Agent.Kernel, arguments, cancellationToken).ConfigureAwait(false); - var response = result.GetValue()!; - - return response.Message; - } -} diff --git a/dotnet/src/Experimental/Agents/AgentReference.cs b/dotnet/src/Experimental/Agents/AgentReference.cs deleted file mode 100644 index beffab6e3e81..000000000000 --- a/dotnet/src/Experimental/Agents/AgentReference.cs +++ /dev/null @@ -1,19 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Response from agent when called as a . -/// -public class AgentReference -{ - /// - /// The agent identifier (which can be referenced in API endpoints). - /// - public string Id { get; internal set; } = string.Empty; - - /// - /// Name of the agent - /// - public string? Name { get; internal set; } -} diff --git a/dotnet/src/Experimental/Agents/AgentResponse.cs b/dotnet/src/Experimental/Agents/AgentResponse.cs deleted file mode 100644 index 658656c7a0d4..000000000000 --- a/dotnet/src/Experimental/Agents/AgentResponse.cs +++ /dev/null @@ -1,29 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Response from agent when called as a . -/// -public class AgentResponse -{ - /// - /// The thread-id for the agent conversation. - /// - [JsonPropertyName("thread_id")] - public string ThreadId { get; set; } = string.Empty; - - /// - /// The agent response. - /// - [JsonPropertyName("response")] - public string Message { get; set; } = string.Empty; - - /// - /// Instructions from agent on next steps. 
- /// - [JsonPropertyName("system_instructions")] - public string Instructions { get; set; } = string.Empty; -} diff --git a/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs b/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs deleted file mode 100644 index 5150c0ee218e..000000000000 --- a/dotnet/src/Experimental/Agents/ChatCompletionAgent.cs +++ /dev/null @@ -1,66 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.ChatCompletion; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Represent an agent that is built around the SK ChatCompletion API and leverages the API's capabilities. -/// -public sealed class ChatCompletionAgent -{ - private readonly Kernel _kernel; - private readonly string _instructions; - private readonly PromptExecutionSettings? _promptExecutionSettings; - - /// - /// Initializes a new instance of the class. - /// - /// The containing services, plugins, and other state for use by the agent. - /// The instructions for the agent. - /// The optional execution settings for the agent. If not provided, default settings will be used. - public ChatCompletionAgent(Kernel kernel, string instructions, PromptExecutionSettings? executionSettings = null) - { - Verify.NotNull(kernel, nameof(kernel)); - this._kernel = kernel; - - Verify.NotNullOrWhiteSpace(instructions, nameof(instructions)); - this._instructions = instructions; - - this._promptExecutionSettings = executionSettings; - } - - /// - /// Invokes the agent to process the given messages and generate a response. - /// - /// A list of the messages for the agent to process. - /// An optional to cancel the operation. - /// List of messages representing the agent's response. 
- public async Task> InvokeAsync(IReadOnlyList messages, CancellationToken cancellationToken = default) - { - var chat = new ChatHistory(this._instructions); - chat.AddRange(messages); - - var chatCompletionService = this.GetChatCompletionService(); - - var chatMessageContent = await chatCompletionService.GetChatMessageContentsAsync( - chat, - this._promptExecutionSettings, - this._kernel, - cancellationToken).ConfigureAwait(false); - - return chatMessageContent; - } - - /// - /// Resolves and returns the chat completion service. - /// - /// An instance of the chat completion service. - private IChatCompletionService GetChatCompletionService() - { - return this._kernel.GetRequiredService(); - } -} diff --git a/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs b/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs deleted file mode 100644 index 25c60b710b11..000000000000 --- a/dotnet/src/Experimental/Agents/Exceptions/AgentException.cs +++ /dev/null @@ -1,35 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Exceptions; - -/// -/// Agent specific . -/// -public class AgentException : KernelException -{ - /// - /// Initializes a new instance of the class. - /// - public AgentException() - { - } - - /// - /// Initializes a new instance of the class with a specified error message. - /// - /// The error message that explains the reason for the exception. - public AgentException(string? message) : base(message) - { - } - - /// - /// Initializes a new instance of the class with a specified error message and a reference to the inner exception that is the cause of this exception. - /// - /// The error message that explains the reason for the exception. - /// The exception that is the cause of the current exception, or a null reference if no inner exception is specified. - public AgentException(string? message, Exception? 
innerException) : base(message, innerException) - { - } -} diff --git a/dotnet/src/Experimental/Agents/Experimental.Agents.csproj b/dotnet/src/Experimental/Agents/Experimental.Agents.csproj deleted file mode 100644 index b5038dbabde9..000000000000 --- a/dotnet/src/Experimental/Agents/Experimental.Agents.csproj +++ /dev/null @@ -1,31 +0,0 @@ -๏ปฟ - - - Microsoft.SemanticKernel.Experimental.Agents - Microsoft.SemanticKernel.Experimental.Agents - net8.0;netstandard2.0 - alpha - - - - - - Semantic Kernel Agents - Semantic Kernel Agents - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs deleted file mode 100644 index 5f2c6596a3ee..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelExtensions.cs +++ /dev/null @@ -1,21 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Extensions; - -internal static class AssistantsKernelExtensions -{ - /// - /// Retrieve a kernel function based on the tool name. - /// - public static KernelFunction GetAssistantTool(this Kernel kernel, string toolName) - { - string[] nameParts = toolName.Split('-'); - return nameParts.Length switch - { - 2 => kernel.Plugins.GetFunction(nameParts[0], nameParts[1]), - _ => throw new AgentException($"Unknown tool: {toolName}"), - }; - } -} diff --git a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs deleted file mode 100644 index 37ffd9b9ed7c..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/AssistantsKernelFunctionExtensions.cs +++ /dev/null @@ -1,111 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -internal static class AssistantsKernelFunctionExtensions -{ - /// - /// Produce a fully qualified toolname. - /// - public static string GetQualifiedName(this KernelFunction function, string pluginName) - { - return $"{pluginName}-{function.Name}"; - } - - /// - /// Convert to an OpenAI tool model. - /// - /// The source function - /// The plugin name - /// An OpenAI tool model - public static ToolModel ToToolModel(this KernelFunction function, string pluginName) - { - var metadata = function.Metadata; - var required = new List(metadata.Parameters.Count); - var properties = - metadata.Parameters.ToDictionary( - p => p.Name, - p => - { - if (p.IsRequired) - { - required.Add(p.Name); - } - - return - new OpenAIParameter - { - Type = ConvertType(p.ParameterType), - Description = p.Description, - }; - }); - - var payload = - new ToolModel - { - Type = "function", - Function = - new() - { - Name = function.GetQualifiedName(pluginName), - Description = function.Description, - Parameters = - new OpenAIParameters - { - Properties = properties, - Required = required, - }, - }, - }; - - return payload; - } - - private static string ConvertType(Type? 
type) - { - if (type is null || type == typeof(string)) - { - return "string"; - } - - if (type.IsNumber()) - { - return "number"; - } - - if (type == typeof(bool)) - { - return "boolean"; - } - - if (type.IsEnum) - { - return "enum"; - } - - if (type.IsArray) - { - return "array"; - } - - return "object"; - } - - private static bool IsNumber(this Type type) => - type == typeof(byte) || - type == typeof(sbyte) || - type == typeof(short) || - type == typeof(ushort) || - type == typeof(int) || - type == typeof(uint) || - type == typeof(long) || - type == typeof(ulong) || - type == typeof(float) || - type == typeof(double) || - type == typeof(decimal); -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs deleted file mode 100644 index be19e285d684..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Assistant.cs +++ /dev/null @@ -1,132 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using System.Web; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Supported OpenAI REST API actions for assistants. -/// -internal static partial class OpenAIRestExtensions -{ - /// - /// Create a new assistant. 
- /// - /// A context for accessing OpenAI REST endpoint - /// The assistant definition - /// A cancellation token - /// An assistant definition - public static Task CreateAssistantModelAsync( - this OpenAIRestContext context, - AssistantModel model, - CancellationToken cancellationToken = default) - { - var payload = - new - { - model = model.Model, - name = model.Name, - description = model.Description, - instructions = model.Instructions, - tools = model.Tools, - file_ids = model.FileIds, - metadata = model.Metadata, - }; - - return - context.ExecutePostAsync( - context.GetAssistantsUrl(), - payload, - cancellationToken); - } - - /// - /// Retrieve an assistant by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// The assistant identifier - /// A cancellation token - /// An assistant definition - public static Task GetAssistantModelAsync( - this OpenAIRestContext context, - string assistantId, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetAssistantUrl(assistantId), - cancellationToken); - } - - /// - /// Retrieve all assistants. - /// - /// A context for accessing OpenAI REST endpoint - /// A limit on the number of objects to be returned. - /// Limit can range between 1 and 100, and the default is 20. - /// Set to true to sort by ascending created_at timestamp - /// instead of descending. - /// A cursor for use in pagination. This is an object ID that defines - /// your place in the list. For instance, if you make a list request and receive 100 objects, - /// ending with obj_foo, your subsequent call can include after=obj_foo in order to - /// fetch the next page of the list. - /// A cursor for use in pagination. This is an object ID that defines - /// your place in the list. For instance, if you make a list request and receive 100 objects, - /// ending with obj_foo, your subsequent call can include before=obj_foo in order to - /// fetch the previous page of the list. 
- /// List of retrieved Assistants - /// A cancellation token - /// An enumeration of assistant definitions - public static async Task> ListAssistantModelsAsync( - this OpenAIRestContext context, - int limit = 20, - bool ascending = false, - string? after = null, - string? before = null, - CancellationToken cancellationToken = default) - { - var query = HttpUtility.ParseQueryString(string.Empty); - query["limit"] = limit.ToString(CultureInfo.InvariantCulture); - query["order"] = ascending ? "asc" : "desc"; - if (!string.IsNullOrWhiteSpace(after)) - { - query["after"] = after; - } - if (!string.IsNullOrWhiteSpace(before)) - { - query["before"] = before; - } - - var result = - await context.ExecuteGetAsync( - context.GetAssistantsUrl(), - query.ToString(), - cancellationToken).ConfigureAwait(false); - - return result.Data; - } - - /// - /// Delete an existing assistant - /// - /// A context for accessing OpenAI REST endpoint - /// Identifier of assistant to delete - /// A cancellation token - public static Task DeleteAssistantModelAsync( - this OpenAIRestContext context, - string id, - CancellationToken cancellationToken = default) - { - return context.ExecuteDeleteAsync(context.GetAssistantUrl(id), cancellationToken); - } - - internal static string GetAssistantsUrl(this OpenAIRestContext context) => $"{context.Endpoint}/assistants"; - - internal static string GetAssistantUrl(this OpenAIRestContext context, string assistantId) => $"{context.Endpoint}/assistants/{assistantId}"; -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs deleted file mode 100644 index bbb31226ff67..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.AssistantFiles.cs +++ /dev/null @@ -1,69 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Supported OpenAI REST API actions for managing assistant files. -/// -internal static partial class OpenAIRestExtensions -{ - /// - /// Associate uploaded file with the assistant, by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// The assistant identifier - /// The identifier of the uploaded file. - /// A cancellation token - /// An assistant definition - public static async Task AddAssistantFileAsync( - this OpenAIRestContext context, - string assistantId, - string fileId, - CancellationToken cancellationToken = default) - { - var payload = - new - { - file_id = fileId - }; - - var result = - await context.ExecutePostAsync( - context.GetAssistantFileUrl(assistantId), - payload, - cancellationToken).ConfigureAwait(false); - - return result.Id; - } - - /// - /// Disassociate uploaded file with from assistant, by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// The assistant identifier - /// The identifier of the uploaded file. 
- /// A cancellation token - public static Task RemoveAssistantFileAsync( - this OpenAIRestContext context, - string assistantId, - string fileId, - CancellationToken cancellationToken = default) - { - return context.ExecuteDeleteAsync(context.GetAssistantFileUrl(assistantId, fileId), cancellationToken); - } - - private static string GetAssistantFileUrl(this OpenAIRestContext context, string assistantId) - { - return $"{context.GetAssistantUrl(assistantId)}/files"; - } - - private static string GetAssistantFileUrl(this OpenAIRestContext context, string assistantId, string fileId) - { - return $"{context.GetAssistantUrl(assistantId)}/files/{fileId}"; - } -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs deleted file mode 100644 index ee73eb991226..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Messages.cs +++ /dev/null @@ -1,127 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Supported OpenAI REST API actions for thread messages. -/// -internal static partial class OpenAIRestExtensions -{ - /// - /// Create a new message. - /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// The message text - /// Up to 10 file ids - /// A cancellation token - /// A message definition - public static Task CreateUserTextMessageAsync( - this OpenAIRestContext context, - string threadId, - string content, - IEnumerable? 
fileIds, - CancellationToken cancellationToken = default) - { - var payload = - new - { - role = AuthorRole.User.Label, - file_ids = fileIds?.ToArray() ?? [], - content - }; - - return - context.ExecutePostAsync( - context.GetMessagesUrl(threadId), - payload, - cancellationToken); - } - - /// - /// Retrieve an message by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// The message identifier - /// A cancellation token - /// A message definition - public static Task GetMessageAsync( - this OpenAIRestContext context, - string threadId, - string messageId, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetMessagesUrl(threadId, messageId), - cancellationToken); - } - - /// - /// Retrieve all thread messages. - /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// The identifier of the last message retrieved - /// The maximum number of messages requested (up to 100 / default: 25) - /// A cancellation token - /// A message list definition - public static Task GetMessagesAsync( - this OpenAIRestContext context, - string threadId, - string? lastId = null, - int? count = null, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetMessagesUrl(threadId), - $"limit={count ?? 25}&after={lastId ?? string.Empty}", - cancellationToken); - } - - /// - /// Retrieve all thread messages. 
- /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// The set of message identifiers to retrieve - /// A cancellation token - /// A message list definition - public static async Task> GetMessagesAsync( - this OpenAIRestContext context, - string threadId, - IEnumerable messageIds, - CancellationToken cancellationToken = default) - { - var tasks = - messageIds.Select( - id => - context.ExecuteGetAsync( - context.GetMessagesUrl(threadId, id), - cancellationToken)).ToArray(); - - await Task.WhenAll(tasks).ConfigureAwait(false); - - return tasks.Select(t => t.Result).ToArray(); - } - - internal static string GetMessagesUrl(this OpenAIRestContext context, string threadId) - { - return $"{context.GetThreadUrl(threadId)}/messages"; - } - - internal static string GetMessagesUrl(this OpenAIRestContext context, string threadId, string messageId) - { - return $"{context.GetThreadUrl(threadId)}/messages/{messageId}"; - } -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs deleted file mode 100644 index 12d3538ad4de..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Run.cs +++ /dev/null @@ -1,137 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Supported OpenAI REST API actions for thread runs. -/// -internal static partial class OpenAIRestExtensions -{ - /// - /// Create a new run. 
- /// - /// A context for accessing OpenAI REST endpoint - /// A thread identifier - /// The assistant identifier - /// Optional instruction override - /// The assistant tools - /// A cancellation token - /// A run definition - public static Task CreateRunAsync( - this OpenAIRestContext context, - string threadId, - string assistantId, - string? instructions = null, - IEnumerable? tools = null, - CancellationToken cancellationToken = default) - { - var payload = - new - { - assistant_id = assistantId, - instructions, - tools, - }; - - return - context.ExecutePostAsync( - context.GetRunsUrl(threadId), - payload, - cancellationToken); - } - - /// - /// Retrieve an run by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// A thread identifier - /// A run identifier - /// A cancellation token - /// A run definition - public static Task GetRunAsync( - this OpenAIRestContext context, - string threadId, - string runId, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetRunUrl(threadId, runId), - cancellationToken); - } - - /// - /// Retrieve run steps by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// A thread identifier - /// A run identifier - /// A cancellation token - /// A set of run steps - public static Task GetRunStepsAsync( - this OpenAIRestContext context, - string threadId, - string runId, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetRunStepsUrl(threadId, runId), - cancellationToken); - } - - /// - /// Add a function result for a run. - /// - /// A context for accessing OpenAI REST endpoint - /// A thread identifier - /// The run identifier - /// The function/tool results. 
- /// A cancellation token - /// A run definition - public static Task AddToolOutputsAsync( - this OpenAIRestContext context, - string threadId, - string runId, - IEnumerable results, - CancellationToken cancellationToken = default) - { - var payload = - new - { - tool_outputs = results - }; - - return - context.ExecutePostAsync( - context.GetRunToolOutputUrl(threadId, runId), - payload, - cancellationToken); - } - - internal static string GetRunsUrl(this OpenAIRestContext context, string threadId) - { - return $"{context.GetThreadUrl(threadId)}/runs"; - } - - internal static string GetRunUrl(this OpenAIRestContext context, string threadId, string runId) - { - return $"{context.GetThreadUrl(threadId)}/runs/{runId}"; - } - - internal static string GetRunStepsUrl(this OpenAIRestContext context, string threadId, string runId) - { - return $"{context.GetThreadUrl(threadId)}/runs/{runId}/steps"; - } - - internal static string GetRunToolOutputUrl(this OpenAIRestContext context, string threadId, string runId) - { - return $"{context.GetThreadUrl(threadId)}/runs/{runId}/submit_tool_outputs"; - } -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs deleted file mode 100644 index 062fbe8ade59..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.Thread.cs +++ /dev/null @@ -1,66 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Supported OpenAI REST API actions for threads. -/// -internal static partial class OpenAIRestExtensions -{ - /// - /// Create a new thread. 
- /// - /// A context for accessing OpenAI REST endpoint - /// A cancellation token - /// A thread definition - public static Task CreateThreadModelAsync( - this OpenAIRestContext context, - CancellationToken cancellationToken = default) - { - return - context.ExecutePostAsync( - context.GetThreadsUrl(), - cancellationToken); - } - - /// - /// Retrieve an thread by identifier. - /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// A cancellation token - /// A thread definition - public static Task GetThreadModelAsync( - this OpenAIRestContext context, - string threadId, - CancellationToken cancellationToken = default) - { - return - context.ExecuteGetAsync( - context.GetThreadUrl(threadId), - cancellationToken); - } - - /// - /// Delete an existing thread. - /// - /// A context for accessing OpenAI REST endpoint - /// Identifier of thread to delete - /// A cancellation token - public static Task DeleteThreadModelAsync( - this OpenAIRestContext context, - string id, - CancellationToken cancellationToken = default) - { - return context.ExecuteDeleteAsync(context.GetThreadUrl(id), cancellationToken); - } - - internal static string GetThreadsUrl(this OpenAIRestContext context) => $"{context.Endpoint}/threads"; - - internal static string GetThreadUrl(this OpenAIRestContext context, string threadId) => $"{context.Endpoint}/threads/{threadId}"; -} diff --git a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs b/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs deleted file mode 100644 index aa4f324490d8..000000000000 --- a/dotnet/src/Experimental/Agents/Extensions/OpenAIRestExtensions.cs +++ /dev/null @@ -1,127 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Net.Http; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Internal; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -internal static partial class OpenAIRestExtensions -{ - private const string HeaderNameAuthorization = "Authorization"; - private const string HeaderNameAzureApiKey = "api-key"; - private const string HeaderNameOpenAIAssistant = "OpenAI-Beta"; - private const string HeaderNameUserAgent = "User-Agent"; - private const string HeaderOpenAIValueAssistant = "assistants=v1"; - - private static Task ExecuteGetAsync( - this OpenAIRestContext context, - string url, - CancellationToken cancellationToken = default) - { - return context.ExecuteGetAsync(url, query: null, cancellationToken); - } - - private static async Task ExecuteGetAsync( - this OpenAIRestContext context, - string url, - string? query = null, - CancellationToken cancellationToken = default) - { - using var request = HttpRequest.CreateGetRequest(context.FormatUrl(url, query)); - - request.AddHeaders(context); - - using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - - var responseBody = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - // Common case is for failure exception to be raised by REST invocation. - // Null result is a logical possibility, but unlikely edge case. - // Might occur due to model alignment issues over time. - return - JsonSerializer.Deserialize(responseBody) ?? 
- throw new AgentException($"Null result processing: {typeof(TResult).Name}"); - } - - private static Task ExecutePostAsync( - this OpenAIRestContext context, - string url, - CancellationToken cancellationToken = default) - { - return context.ExecutePostAsync(url, payload: null, cancellationToken); - } - - private static async Task ExecutePostAsync( - this OpenAIRestContext context, - string url, - object? payload, - CancellationToken cancellationToken = default) - { - using var request = HttpRequest.CreatePostRequest(context.FormatUrl(url), payload); - - request.AddHeaders(context); - - using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - - var responseBody = await response.Content.ReadAsStringWithExceptionMappingAsync().ConfigureAwait(false); - - return - JsonSerializer.Deserialize(responseBody) ?? - throw new AgentException($"Null result processing: {typeof(TResult).Name}"); - } - - private static async Task ExecuteDeleteAsync( - this OpenAIRestContext context, - string url, - CancellationToken cancellationToken = default) - { - using var request = HttpRequest.CreateDeleteRequest(context.FormatUrl(url)); - - request.AddHeaders(context); - - using var response = await context.GetHttpClient().SendWithSuccessCheckAsync(request, cancellationToken).ConfigureAwait(false); - } - - private static void AddHeaders(this HttpRequestMessage request, OpenAIRestContext context) - { - request.Headers.Add(HeaderNameOpenAIAssistant, HeaderOpenAIValueAssistant); - request.Headers.Add(HeaderNameUserAgent, HttpHeaderConstant.Values.UserAgent); - request.Headers.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIFileService))); - - if (context.HasVersion) - { - // Azure OpenAI - request.Headers.Add(HeaderNameAzureApiKey, context.ApiKey); - return; - } - - // OpenAI - request.Headers.Add(HeaderNameAuthorization, $"Bearer {context.ApiKey}"); - } - - 
private static string FormatUrl( - this OpenAIRestContext context, - string url, - string? query = null) - { - var hasQuery = !string.IsNullOrWhiteSpace(query); - var delimiter = hasQuery ? "?" : string.Empty; - - if (!context.HasVersion) - { - // OpenAI - return $"{url}{delimiter}{query}"; - } - - // Azure OpenAI - var delimiterB = hasQuery ? "&" : "?"; - - return $"{url}{delimiter}{query}{delimiterB}api-version={context.Version}"; - } -} diff --git a/dotnet/src/Experimental/Agents/IAgent.cs b/dotnet/src/Experimental/Agents/IAgent.cs deleted file mode 100644 index 69a0e9272756..000000000000 --- a/dotnet/src/Experimental/Agents/IAgent.cs +++ /dev/null @@ -1,120 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Represents an agent that can call the model and use tools. -/// -public interface IAgent -{ - /// - /// The agent identifier (which can be referenced in API endpoints). - /// - string Id { get; } - - /// - /// Identifies additional agent capabilities. - /// - AgentCapability Capabilities { get; } - - /// - /// Unix timestamp (in seconds) for when the agent was created - /// - long CreatedAt { get; } - - /// - /// Name of the agent - /// - string? Name { get; } - - /// - /// The description of the agent - /// - string? Description { get; } - - /// - /// ID of the model to use - /// - string Model { get; } - - /// - /// The system instructions that the agent uses - /// - string Instructions { get; } - - /// - /// Identifiers of files associated with agent. - /// - IEnumerable FileIds { get; } - - /// - /// Tools defined for run execution. - /// - KernelPluginCollection Plugins { get; } - - /// - /// A semantic-kernel instance associated with the agent. - /// - internal Kernel Kernel { get; } - - /// - /// Internal tools model. 
- /// - internal IEnumerable Tools { get; } - - /// - /// Expose the agent as a plugin. - /// - AgentPlugin AsPlugin(); - - /// - /// Expose the agent internally as a prompt-template - /// - internal IPromptTemplate AsPromptTemplate(); - - /// - /// Creates a new agent chat thread. - /// - /// A cancellation token - Task NewThreadAsync(CancellationToken cancellationToken = default); - - /// - /// Gets an existing agent chat thread. - /// - /// The id of the existing chat thread. - /// A cancellation token - Task GetThreadAsync(string id, CancellationToken cancellationToken = default); - - /// - /// Deletes an existing agent chat thread. - /// - /// The id of the existing chat thread. Allows for null-fallthrough to simplify caller patterns. - /// A cancellation token - Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default); - - /// - /// Associate uploaded file with the agent, by identifier. - /// - /// The identifier of the uploaded file. - /// A cancellation token - Task AddFileAsync(string fileId, CancellationToken cancellationToken = default); - - /// - /// Remove association of uploaded file with the agent, by identifier. - /// - /// The identifier of the uploaded file. - /// A cancellation token - Task RemoveFileAsync(string fileId, CancellationToken cancellationToken = default); - - /// - /// Delete current agent. Terminal state - Unable to perform any - /// subsequent actions. - /// - /// A cancellation token - Task DeleteAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Experimental/Agents/IAgentExtensions.cs b/dotnet/src/Experimental/Agents/IAgentExtensions.cs deleted file mode 100644 index 9344043c2bea..000000000000 --- a/dotnet/src/Experimental/Agents/IAgentExtensions.cs +++ /dev/null @@ -1,44 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Convenience actions for . -/// -public static class IAgentExtensions -{ - /// - /// Invoke agent with user input - /// - /// the agent - /// the user input - /// Optional arguments for parameterized instructions - /// an array of up to 10 file ids to reference for the message - /// Optional cancellation token - /// Chat messages - public static async IAsyncEnumerable InvokeAsync( - this IAgent agent, - string input, - KernelArguments? arguments = null, - IEnumerable? fileIds = null, - [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - IAgentThread thread = await agent.NewThreadAsync(cancellationToken).ConfigureAwait(false); - try - { - await foreach (var message in thread.InvokeAsync(agent, input, arguments, fileIds, cancellationToken).ConfigureAwait(false)) - { - yield return message; - } - } - finally - { - await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); - } - } -} diff --git a/dotnet/src/Experimental/Agents/IAgentThread.cs b/dotnet/src/Experimental/Agents/IAgentThread.cs deleted file mode 100644 index 12bcfe33ed3e..000000000000 --- a/dotnet/src/Experimental/Agents/IAgentThread.cs +++ /dev/null @@ -1,69 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Represents a thread that contains messages. -/// -public interface IAgentThread -{ - /// - /// The thread identifier (which can be referenced in API endpoints). - /// - string Id { get; } - - /// - /// Allow the provided to - /// to be passed through to any function calling. - /// - bool EnableFunctionArgumentPassThrough { get; set; } - - /// - /// Add a textual user message to the thread. 
- /// - /// The user message - /// up to 10 file ids to reference for the message - /// A cancellation token - /// - Task AddUserMessageAsync(string message, IEnumerable? fileIds = null, CancellationToken cancellationToken = default); - - /// - /// Retrieve thread messages in descending order (most recent first). - /// - /// The maximum number of messages requested - /// The identifier of the last message retrieved - /// A cancellation token - /// An list of . - Task> GetMessagesAsync(int? count = null, string? lastMessageId = null, CancellationToken cancellationToken = default); - - /// - /// Advance the thread with the specified agent. - /// - /// An agent instance. - /// Optional arguments for parameterized instructions - /// A cancellation token - /// The resulting agent message(s) - IAsyncEnumerable InvokeAsync(IAgent agent, KernelArguments? arguments = null, CancellationToken cancellationToken = default); - - /// - /// Advance the thread with the specified agent. - /// - /// An agent instance. - /// The user message - /// Optional arguments for parameterized instructions - /// up to 10 file ids to reference for the message - /// A cancellation token - /// The resulting agent message(s) - IAsyncEnumerable InvokeAsync(IAgent agent, string userMessage, KernelArguments? arguments = null, IEnumerable? fileIds = null, CancellationToken cancellationToken = default); - - /// - /// Delete current thread. Terminal state - Unable to perform any - /// subsequent actions. - /// - /// A cancellation token - Task DeleteAsync(CancellationToken cancellationToken = default); -} diff --git a/dotnet/src/Experimental/Agents/IChatMessage.cs b/dotnet/src/Experimental/Agents/IChatMessage.cs deleted file mode 100644 index 366058112a7a..000000000000 --- a/dotnet/src/Experimental/Agents/IChatMessage.cs +++ /dev/null @@ -1,94 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Collections.ObjectModel; - -namespace Microsoft.SemanticKernel.Experimental.Agents; - -/// -/// Describes the message content type -/// -public enum ChatMessageType -{ - /// - /// A message with text content - /// - Text, - - /// - /// A message that references an image by file-id - /// - Image, -} - -/// -/// Represents a message that is part of an agent thread. -/// -public interface IChatMessage -{ - /// - /// The message identifier (which can be referenced in API endpoints). - /// - string Id { get; } - - /// - /// The id of the agent associated with the a message where role = "agent", otherwise null. - /// - string? AgentId { get; } - - /// - /// Describes the content-type of the message - /// - ChatMessageType ContentType { get; } - - /// - /// The chat message content. - /// - string Content { get; } - - /// - /// The role associated with the chat message. - /// - string Role { get; } - - /// - /// Annotations associated with the message. - /// - IList Annotations { get; } - - /// - /// Properties associated with the message. - /// - ReadOnlyDictionary Properties { get; } - - /// - /// Defines message annotation. - /// - interface IAnnotation - { - /// - /// The file identifier. - /// - string FileId { get; } - - /// - /// The text in the message content that needs to be replaced. - /// - string Label { get; } - - /// - /// The citation. - /// - string? Quote { get; } - - /// - /// Start index of the citation. - /// - int StartIndex { get; } - - /// - /// End index of the citation. - /// - int EndIndex { get; } - } -} diff --git a/dotnet/src/Experimental/Agents/Internal/Agent.cs b/dotnet/src/Experimental/Agents/Internal/Agent.cs deleted file mode 100644 index ae64af04d39a..000000000000 --- a/dotnet/src/Experimental/Agents/Internal/Agent.cs +++ /dev/null @@ -1,310 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Text.RegularExpressions; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Models; -using Microsoft.SemanticKernel.PromptTemplates.Handlebars; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; - -/// -/// Represents an agent that can call the model and use tools. -/// -internal sealed class Agent : IAgent -{ - public const string ToolCodeInterpreter = "code_interpreter"; - public const string ToolRetrieval = "retrieval"; - - /// - public string Id => this._model.Id; - - /// - public Kernel Kernel { get; } - - /// - public KernelPluginCollection Plugins => this.Kernel.Plugins; - - /// - public AgentCapability Capabilities { get; } - - /// - public long CreatedAt => this._model.CreatedAt; - - /// - public string? Name => this._model.Name; - - /// - public string? Description => this._model.Description; - - /// - public string Model => this._model.Model; - - /// - public string Instructions => this._model.Instructions; - - /// - public IEnumerable Tools => this._tools; - - /// - public IEnumerable FileIds => this._fileIds.AsEnumerable(); - - private static readonly Regex s_removeInvalidCharsRegex = new("[^0-9A-Za-z-]"); - - private static readonly Dictionary s_templateFactories = - new(StringComparer.OrdinalIgnoreCase) - { - { PromptTemplateConfig.SemanticKernelTemplateFormat, new KernelPromptTemplateFactory() }, - { HandlebarsPromptTemplateFactory.HandlebarsTemplateFormat, new HandlebarsPromptTemplateFactory() }, - }; - - private readonly OpenAIRestContext _restContext; - private readonly AssistantModel _model; - private readonly IPromptTemplate _promptTemplate; - private readonly ToolModel[] _tools; - private readonly HashSet _fileIds; - - private AgentPlugin? 
_agentPlugin; - private bool _isDeleted; - - /// - /// Create a new agent. - /// - /// A context for accessing OpenAI REST endpoint - /// The assistant definition - /// The template config - /// Plugins to initialize as agent tools - /// A cancellation token - /// An initialized instance. - public static async Task CreateAsync( - OpenAIRestContext restContext, - AssistantModel assistantModel, - PromptTemplateConfig? config, - IEnumerable? plugins = null, - CancellationToken cancellationToken = default) - { - var resultModel = await restContext.CreateAssistantModelAsync(assistantModel, cancellationToken).ConfigureAwait(false); - - return new Agent(resultModel, config, restContext, plugins); - } - - /// - /// Initializes a new instance of the class. - /// - internal Agent( - AssistantModel assistantModel, - PromptTemplateConfig? config, - OpenAIRestContext restContext, - IEnumerable? plugins = null) - { - config ??= - new PromptTemplateConfig - { - Name = assistantModel.Name, - Description = assistantModel.Description, - Template = assistantModel.Instructions, - }; - - this._model = assistantModel; - this._restContext = restContext; - this._promptTemplate = this.DefinePromptTemplate(config); - this._fileIds = new HashSet(assistantModel.FileIds, StringComparer.OrdinalIgnoreCase); - - IKernelBuilder builder = Kernel.CreateBuilder(); - - this.Kernel = - this._restContext.HasVersion ? - builder.AddAzureOpenAIChatCompletion(this._model.Model, this.GetAzureRootEndpoint(), this._restContext.ApiKey).Build() : - builder.AddOpenAIChatCompletion(this._model.Model, this._restContext.ApiKey).Build(); - - if (plugins is not null) - { - this.Kernel.Plugins.AddRange(plugins); - } - - this.Capabilities = - (this.Kernel.Plugins.Count > 0 ? AgentCapability.Functions : AgentCapability.None) | - (this._model.Tools.Any(t => string.Equals(t.Type, ToolRetrieval, StringComparison.OrdinalIgnoreCase)) ? 
AgentCapability.Retrieval : AgentCapability.None) | - (this._model.Tools.Any(t => string.Equals(t.Type, ToolCodeInterpreter, StringComparison.OrdinalIgnoreCase)) ? AgentCapability.CodeInterpreter : AgentCapability.None); - - this._tools = this._model.Tools.Concat(this.Kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolModel(p.Name)))).ToArray(); - } - - public AgentPlugin AsPlugin() => this._agentPlugin ??= this.DefinePlugin(); - - public IPromptTemplate AsPromptTemplate() => this._promptTemplate; - - /// - public Task NewThreadAsync(CancellationToken cancellationToken = default) - { - this.ThrowIfDeleted(); - - return ChatThread.CreateAsync(this._restContext, cancellationToken); - } - - /// - public Task GetThreadAsync(string id, CancellationToken cancellationToken = default) - { - this.ThrowIfDeleted(); - - return ChatThread.GetAsync(this._restContext, id, cancellationToken); - } - - /// - public async Task DeleteThreadAsync(string? id, CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(id)) - { - return; - } - - await this._restContext.DeleteThreadModelAsync(id!, cancellationToken).ConfigureAwait(false); - } - - /// - public async Task AddFileAsync(string fileId, CancellationToken cancellationToken = default) - { - if (this._isDeleted) - { - return; - } - - if (this._fileIds.Contains(fileId)) - { - return; - } - - await this._restContext.AddAssistantFileAsync(this.Id, fileId, cancellationToken).ConfigureAwait(false); - - this._fileIds.Add(fileId); - } - - /// - public async Task RemoveFileAsync(string fileId, CancellationToken cancellationToken = default) - { - if (this._isDeleted) - { - return; - } - - if (!this._fileIds.Contains(fileId)) - { - return; - } - - await this._restContext.RemoveAssistantFileAsync(this.Id, fileId, cancellationToken).ConfigureAwait(false); - - this._fileIds.Remove(fileId); - } - - /// - public async Task DeleteAsync(CancellationToken cancellationToken = default) - { - if (this._isDeleted) - { - 
return; - } - - await this._restContext.DeleteAssistantModelAsync(this.Id, cancellationToken).ConfigureAwait(false); - this._isDeleted = true; - } - - /// - /// Marshal thread run through interface. - /// - /// The user input - /// Arguments for parameterized instructions - /// A cancellation token. - /// An agent response ( - private async Task AskAsync( - [Description("The user message provided to the agent.")] - string input, - KernelArguments arguments, - CancellationToken cancellationToken = default) - { - var thread = await this.NewThreadAsync(cancellationToken).ConfigureAwait(false); - try - { - var messages = await thread.InvokeAsync(this, input, arguments, fileIds: null, cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false); - var response = - new AgentResponse - { - ThreadId = thread.Id, - Message = string.Join(Environment.NewLine, messages.Select(m => m.Content)), - }; - - return response; - } - finally - { - await thread.DeleteAsync(cancellationToken).ConfigureAwait(false); - } - } - - private AgentPluginImpl DefinePlugin() - { - var functionAsk = KernelFunctionFactory.CreateFromMethod(this.AskAsync, description: this.Description); - - return new AgentPluginImpl(this, functionAsk); - } - - private IPromptTemplate DefinePromptTemplate(PromptTemplateConfig config) - { - if (!s_templateFactories.TryGetValue(config.TemplateFormat, out var factory)) - { - factory = new KernelPromptTemplateFactory(); - } - - return factory.Create(config); - } - - private string GetAzureRootEndpoint() - { - var endpointUri = new Uri(this._restContext.Endpoint); - return endpointUri.AbsoluteUri.Replace(endpointUri.AbsolutePath, string.Empty); - } - - private void ThrowIfDeleted() - { - if (this._isDeleted) - { - throw new AgentException($"{nameof(Agent)}: {this.Id} has been deleted."); - } - } - - private sealed class AgentPluginImpl(Agent agent, KernelFunction functionAsk) : - AgentPlugin(s_removeInvalidCharsRegex.Replace(agent.Name ?? 
agent.Id, string.Empty), - agent.Description ?? agent.Instructions) - { - public KernelFunction FunctionAsk { get; } = functionAsk; - - internal override Agent Agent { get; } = agent; - - public override int FunctionCount => 1; - - private static readonly string s_functionName = nameof(Agent.AskAsync).Substring(0, nameof(AgentPluginImpl.Agent.AskAsync).Length - 5); - - public override IEnumerator GetEnumerator() - { - yield return this.FunctionAsk; - } - - public override bool TryGetFunction(string name, [NotNullWhen(true)] out KernelFunction? function) - { - function = null; - - if (s_functionName.Equals(name, StringComparison.OrdinalIgnoreCase)) - { - function = this.FunctionAsk; - } - - return function is not null; - } - } -} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs b/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs deleted file mode 100644 index e94353837d4b..000000000000 --- a/dotnet/src/Experimental/Agents/Internal/ChatMessage.cs +++ /dev/null @@ -1,74 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Linq; -using Microsoft.SemanticKernel.Experimental.Agents.Models; -using static Microsoft.SemanticKernel.Experimental.Agents.IChatMessage; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; - -/// -/// Represents a message that is part of an agent thread. -/// -internal sealed class ChatMessage : IChatMessage -{ - /// - public string Id { get; } - - /// - public string? AgentId { get; } - - /// - public ChatMessageType ContentType { get; } - - /// - public string Content { get; } - - /// - public string Role { get; } - - /// - public ReadOnlyDictionary Properties { get; } - - public IList Annotations { get; } - - /// - /// Initializes a new instance of the class. 
- /// - internal ChatMessage(ThreadMessageModel model) - { - var content = model.Content.First(); - - this.Annotations = - content.Text is null ? - Array.Empty() : - content.Text.Annotations.Select(a => new Annotation(a.Text, a.StartIndex, a.EndIndex, a.FileCitation?.FileId ?? a.FilePath!.FileId, a.FileCitation?.Quote)).ToArray(); - - this.Id = model.Id; - this.AgentId = string.IsNullOrWhiteSpace(model.AssistantId) ? null : model.AssistantId; - this.Role = model.Role; - this.ContentType = content.Text is null ? ChatMessageType.Image : ChatMessageType.Text; - this.Content = content.Text?.Value ?? content.Image?.FileId ?? string.Empty; - this.Properties = new ReadOnlyDictionary(model.Metadata); - } - - private sealed class Annotation(string label, int startIndex, int endIndex, string fileId, string? quote) : IAnnotation - { - /// - public string FileId { get; } = fileId; - - /// - public string Label { get; } = label; - - /// - public string? Quote { get; } = quote; - - /// - public int StartIndex { get; } = startIndex; - - /// - public int EndIndex { get; } = endIndex; - } -} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatRun.cs b/dotnet/src/Experimental/Agents/Internal/ChatRun.cs deleted file mode 100644 index 1928f219c903..000000000000 --- a/dotnet/src/Experimental/Agents/Internal/ChatRun.cs +++ /dev/null @@ -1,194 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Extensions; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; - -/// -/// Represents an execution run on a thread. -/// -internal sealed class ChatRun -{ - /// - /// ID of this run. 
- /// - public string Id => this._model.Id; - - /// - /// ID of the assistant used for execution of this run. - /// - public string AgentId => this._model.AssistantId; - - /// - /// ID of the thread that was executed on as a part of this run. - /// - public string ThreadId => this._model.ThreadId; - - /// - /// Optional arguments for injection into function-calling. - /// - public KernelArguments? Arguments { get; init; } - - private const string ActionState = "requires_action"; - private const string CompletedState = "completed"; - private static readonly TimeSpan s_pollingInterval = TimeSpan.FromMilliseconds(500); - private static readonly TimeSpan s_pollingBackoff = TimeSpan.FromSeconds(1); - - private static readonly HashSet s_pollingStates = - new(StringComparer.OrdinalIgnoreCase) - { - "queued", - "in_progress", - "cancelling", - }; - - private static readonly HashSet s_terminalStates = - new(StringComparer.OrdinalIgnoreCase) - { - "expired", - "failed", - "cancelled", - }; - - private readonly OpenAIRestContext _restContext; - private readonly Kernel _kernel; - - private ThreadRunModel _model; - - /// - public async IAsyncEnumerable GetResultAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) - { - var processedMessageIds = new HashSet(); - - do - { - // Poll run and steps until actionable - var steps = await PollRunStatusAsync().ConfigureAwait(false); - - // Is in terminal state? - if (s_terminalStates.Contains(this._model.Status)) - { - throw new AgentException($"Run terminated - {this._model.Status} [{this.Id}]: {this._model.LastError?.Message ?? "Unknown"}"); - } - - // Is tool action required? - if (ActionState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)) - { - // Execute functions in parallel and post results at once. 
- var tasks = steps.Data.SelectMany(step => this.ExecuteStep(step, cancellationToken)).ToArray(); - if (tasks.Length > 0) - { - var results = await Task.WhenAll(tasks).ConfigureAwait(false); - await this._restContext.AddToolOutputsAsync(this.ThreadId, this.Id, results, cancellationToken).ConfigureAwait(false); - } - } - - // Enumerate completed messages - var newMessageIds = - steps.Data - .Where(s => s.StepDetails.MessageCreation is not null) - .Select(s => (s.StepDetails.MessageCreation!.MessageId, s.CompletedAt)) - .Where(t => !processedMessageIds.Contains(t.MessageId)) - .OrderBy(t => t.CompletedAt) - .Select(t => t.MessageId); - - foreach (var messageId in newMessageIds) - { - processedMessageIds.Add(messageId); - yield return messageId; - } - } - while (!CompletedState.Equals(this._model.Status, StringComparison.OrdinalIgnoreCase)); - - async Task PollRunStatusAsync() - { - int count = 0; - - do - { - // Reduce polling frequency after a couple attempts - await Task.Delay(count >= 2 ? s_pollingInterval : s_pollingBackoff, cancellationToken).ConfigureAwait(false); - ++count; - - try - { - this._model = await this._restContext.GetRunAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); - } - catch (Exception exception) when (!exception.IsCriticalException()) - { - // Retry anyway.. - } - } - while (s_pollingStates.Contains(this._model.Status)); - - return await this._restContext.GetRunStepsAsync(this.ThreadId, this.Id, cancellationToken).ConfigureAwait(false); - } - } - - /// - /// Initializes a new instance of the class. 
- /// - internal ChatRun( - ThreadRunModel model, - Kernel kernel, - OpenAIRestContext restContext) - { - this._model = model; - this._kernel = kernel; - this._restContext = restContext; - } - - private IEnumerable> ExecuteStep(ThreadRunStepModel step, CancellationToken cancellationToken) - { - // Process all of the steps that require action - if (step.Status == "in_progress" && step.StepDetails.Type == "tool_calls") - { - foreach (var toolCall in step.StepDetails.ToolCalls) - { - // Run function - yield return this.ProcessFunctionStepAsync(toolCall.Id, toolCall.Function, cancellationToken); - } - } - } - - private async Task ProcessFunctionStepAsync(string callId, ThreadRunStepModel.FunctionDetailsModel functionDetails, CancellationToken cancellationToken) - { - var result = await InvokeFunctionCallAsync().ConfigureAwait(false); - var toolResult = result as string ?? JsonSerializer.Serialize(result); - - return - new ToolResultModel - { - CallId = callId, - Output = toolResult!, - }; - - async Task InvokeFunctionCallAsync() - { - var function = this._kernel.GetAssistantTool(functionDetails.Name); - - var functionArguments = new KernelArguments(this.Arguments ?? []); - if (!string.IsNullOrWhiteSpace(functionDetails.Arguments)) - { - var arguments = JsonSerializer.Deserialize>(functionDetails.Arguments)!; - foreach (var argument in arguments) - { - functionArguments[argument.Key] = argument.Value.ToString(); - } - } - - var result = await function.InvokeAsync(this._kernel, functionArguments, cancellationToken).ConfigureAwait(false); - - return result.GetValue() ?? string.Empty; - } - } -} diff --git a/dotnet/src/Experimental/Agents/Internal/ChatThread.cs b/dotnet/src/Experimental/Agents/Internal/ChatThread.cs deleted file mode 100644 index 1b395ccd970d..000000000000 --- a/dotnet/src/Experimental/Agents/Internal/ChatThread.cs +++ /dev/null @@ -1,140 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System.Collections.Generic; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel.Experimental.Agents.Exceptions; -using Microsoft.SemanticKernel.Experimental.Agents.Models; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; - -/// -/// Represents a thread that contains messages. -/// -internal sealed class ChatThread : IAgentThread -{ - /// - public string Id { get; private set; } - - /// - public bool EnableFunctionArgumentPassThrough { get; set; } - - private readonly OpenAIRestContext _restContext; - private bool _isDeleted; - - /// - /// Create a new thread. - /// - /// A context for accessing OpenAI REST endpoint - /// A cancellation token - /// An initialized instance. - public static async Task CreateAsync(OpenAIRestContext restContext, CancellationToken cancellationToken = default) - { - // Common case is for failure exception to be raised by REST invocation. Null result is a logical possibility, but unlikely edge case. - var threadModel = await restContext.CreateThreadModelAsync(cancellationToken).ConfigureAwait(false); - - return new ChatThread(threadModel, restContext); - } - - /// - /// Retrieve an existing thread. - /// - /// A context for accessing OpenAI REST endpoint - /// The thread identifier - /// A cancellation token - /// An initialized instance. - public static async Task GetAsync(OpenAIRestContext restContext, string threadId, CancellationToken cancellationToken = default) - { - var threadModel = await restContext.GetThreadModelAsync(threadId, cancellationToken).ConfigureAwait(false); - - return new ChatThread(threadModel, restContext); - } - - /// - public async Task AddUserMessageAsync(string message, IEnumerable? 
fileIds = null, CancellationToken cancellationToken = default) - { - this.ThrowIfDeleted(); - - var messageModel = await this._restContext.CreateUserTextMessageAsync(this.Id, message, fileIds, cancellationToken).ConfigureAwait(false); - - return new ChatMessage(messageModel); - } - - /// - public async Task> GetMessagesAsync(int? count = null, string? lastMessageId = null, CancellationToken cancellationToken = default) - { - var messageModel = await this._restContext.GetMessagesAsync(this.Id, lastMessageId, count, cancellationToken).ConfigureAwait(false); - - return messageModel.Data.Select(m => new ChatMessage(m)).ToArray(); - } - - /// - public IAsyncEnumerable InvokeAsync(IAgent agent, KernelArguments? arguments = null, CancellationToken cancellationToken = default) - { - return this.InvokeAsync(agent, string.Empty, arguments, null, cancellationToken); - } - - /// - public async IAsyncEnumerable InvokeAsync(IAgent agent, string userMessage, KernelArguments? arguments = null, IEnumerable? fileIds = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) - { - this.ThrowIfDeleted(); - - if (!string.IsNullOrWhiteSpace(userMessage)) - { - yield return await this.AddUserMessageAsync(userMessage, fileIds, cancellationToken).ConfigureAwait(false); - } - - // Finalize prompt / agent instructions using provided parameters. - var instructions = await agent.AsPromptTemplate().RenderAsync(agent.Kernel, arguments, cancellationToken).ConfigureAwait(false); - - // Create run using templated prompt - var runModel = await this._restContext.CreateRunAsync(this.Id, agent.Id, instructions, agent.Tools, cancellationToken).ConfigureAwait(false); - var run = - new ChatRun(runModel, agent.Kernel, this._restContext) - { - Arguments = this.EnableFunctionArgumentPassThrough ? 
arguments : null, - }; - - await foreach (var messageId in run.GetResultAsync(cancellationToken).ConfigureAwait(false)) - { - var message = await this._restContext.GetMessageAsync(this.Id, messageId, cancellationToken).ConfigureAwait(false); - yield return new ChatMessage(message); - } - } - - /// - /// Delete an existing thread. - /// - /// A cancellation token - public async Task DeleteAsync(CancellationToken cancellationToken) - { - if (this._isDeleted) - { - return; - } - - await this._restContext.DeleteThreadModelAsync(this.Id, cancellationToken).ConfigureAwait(false); - this._isDeleted = true; - } - - /// - /// Initializes a new instance of the class. - /// - private ChatThread( - ThreadModel threadModel, - OpenAIRestContext restContext) - { - this.Id = threadModel.Id; - this._restContext = restContext; - } - - private void ThrowIfDeleted() - { - if (this._isDeleted) - { - throw new AgentException($"{nameof(ChatThread)}: {this.Id} has been deleted."); - } - } -} diff --git a/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs b/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs deleted file mode 100644 index 33fe3fc7ff47..000000000000 --- a/dotnet/src/Experimental/Agents/Internal/OpenAIRestContext.cs +++ /dev/null @@ -1,48 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Internal; - -/// -/// Placeholder context. -/// -internal sealed class OpenAIRestContext(string endpoint, string apiKey, string? version, Func? clientFactory = null) -{ - private static readonly HttpClient s_defaultOpenAIClient = new(); - - /// - /// The service API key. - /// - public string ApiKey { get; } = apiKey; - - /// - /// The service endpoint. - /// - public string Endpoint { get; } = endpoint; - - /// - /// Is the version defined? - /// - public bool HasVersion { get; } = !string.IsNullOrEmpty(version); - - /// - /// The optional API version. 
- /// - public string? Version { get; } = version; - - /// - /// Accessor for the http client. - /// - public HttpClient GetHttpClient() => this._clientFactory.Invoke(); - - private readonly Func _clientFactory = clientFactory ??= () => s_defaultOpenAIClient; - - /// - /// Initializes a new instance of the class. - /// - public OpenAIRestContext(string endpoint, string apiKey, Func? clientFactory = null) - : this(endpoint, apiKey, version: null, clientFactory) - { } -} diff --git a/dotnet/src/Experimental/Agents/Models/AssistantModel.cs b/dotnet/src/Experimental/Agents/Models/AssistantModel.cs deleted file mode 100644 index 8fb57b65d418..000000000000 --- a/dotnet/src/Experimental/Agents/Models/AssistantModel.cs +++ /dev/null @@ -1,107 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// list of run steps belonging to a run. -/// -internal sealed class AssistantListModel : OpenAIListModel -{ - // No specialization -} - -/// -/// Model of Assistant data returned from OpenAI -/// -internal sealed record AssistantModel -{ - /// - /// Identifier, which can be referenced in API endpoints - /// - [JsonPropertyName("id")] - public string Id { get; init; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the assistant was created - /// - [JsonPropertyName("created_at")] - public long CreatedAt { get; init; } - - /// - /// Name of the assistant - /// - [JsonPropertyName("name")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Name { get; set; } - - /// - /// The description of the assistant - /// - [JsonPropertyName("description")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
Description { get; set; } - - /// - /// ID of the model to use - /// - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - /// - /// The system instructions that the assistant uses - /// - [JsonPropertyName("instructions")] - public string Instructions { get; set; } = string.Empty; - - /// - /// A list of tool enabled on the assistant - /// There can be a maximum of 128 tools per assistant. - /// - [JsonPropertyName("tools")] - public List Tools { get; init; } = []; - - /// - /// A list of file IDs attached to this assistant. - /// There can be a maximum of 20 files attached to the assistant. - /// - [JsonPropertyName("file_ids")] - public List FileIds { get; init; } = []; - - /// - /// Set of 16 key-value pairs that can be attached to an object. - /// This can be useful for storing additional information about the - /// object in a structured format. - /// Keys can be a maximum of 64 characters long and values can be a - /// maximum of 512 characters long. - /// - [JsonPropertyName("metadata")] - public Dictionary Metadata { get; init; } = []; - - /// - /// Assistant file model. - /// - public sealed class FileModel - { - /// - /// ID of the assistant. - /// - [JsonPropertyName("assistant_id")] - public string AssistantId { get; set; } = string.Empty; - - /// - /// ID of the uploaded file. - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the assistant was created - /// - [JsonPropertyName("created_at")] - public long CreatedAt { get; init; } - } -} diff --git a/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs b/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs deleted file mode 100644 index 199286fd3717..000000000000 --- a/dotnet/src/Experimental/Agents/Models/OpenAIListModel.cs +++ /dev/null @@ -1,37 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-#pragma warning disable CA1812 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// list of run steps belonging to a run. -/// -internal abstract class OpenAIListModel -{ - /// - /// List of steps. - /// - [JsonPropertyName("data")] - public List Data { get; set; } = []; - - /// - /// The identifier of the first data record. - /// - [JsonPropertyName("first_id")] - public string FirstId { get; set; } = string.Empty; - - /// - /// The identifier of the last data record. - /// - [JsonPropertyName("last_id")] - public string LastId { get; set; } = string.Empty; - - /// - /// Indicates of more pages of data exist. - /// - [JsonPropertyName("has_more")] - public bool HasMore { get; set; } -} diff --git a/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs b/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs deleted file mode 100644 index 69ac459e4c5b..000000000000 --- a/dotnet/src/Experimental/Agents/Models/OpenAIParameters.cs +++ /dev/null @@ -1,56 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// Wrapper for parameter map. -/// -internal sealed class OpenAIParameters -{ - /// - /// Empty parameter set. - /// - public static readonly OpenAIParameters Empty = new(); - - /// - /// Always "object" - /// - [JsonPropertyName("type")] - public string Type { get; set; } = "object"; - - /// - /// Set of parameters. - /// - [JsonPropertyName("properties")] - public Dictionary Properties { get; set; } = []; - - /// - /// Set of parameters. - /// - [JsonPropertyName("required")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public List? Required { get; set; } -} - -/// -/// Wrapper for parameter definition. 
-/// -internal sealed class OpenAIParameter -{ - /// - /// The parameter type. - /// - [JsonPropertyName("type")] - public string Type { get; set; } = "object"; - - /// - /// The parameter description. - /// - [JsonPropertyName("description")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Description { get; set; } -} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs deleted file mode 100644 index cde59d5caaf0..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ThreadMessageModel.cs +++ /dev/null @@ -1,196 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 -#pragma warning disable CA1852 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// list of run steps belonging to a run. -/// -internal sealed class ThreadMessageListModel : OpenAIListModel -{ - // No specialization -} - -/// -/// Represents a message within a thread. -/// -internal sealed class ThreadMessageModel -{ - /// - /// Identifier, which can be referenced in API endpoints. - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the message was created. - /// - [JsonPropertyName("created_at")] - public long CreatedAt { get; set; } - - /// - /// The thread ID that this message belongs to. - /// - [JsonPropertyName("thread_id")] - public string ThreadId { get; set; } = string.Empty; - - /// - /// The entity that produced the message. One of "user" or "assistant". - /// - [JsonPropertyName("role")] - public string Role { get; set; } = string.Empty; - - /// - /// The content of the message in array of text and/or images. - /// - [JsonPropertyName("content")] - public List Content { get; set; } = []; - - /// - /// A list of file IDs that the agent should use. 
- /// - [JsonPropertyName("file_ids")] - public List FileIds { get; set; } = []; - - /// - /// If applicable, the ID of the assistant that authored this message. - /// - [JsonPropertyName("assistant_id")] - public string AssistantId { get; set; } = string.Empty; - - /// - /// If applicable, the ID of the run associated with the authoring of this message. - /// - [JsonPropertyName("run_id")] - public string RunId { get; set; } = string.Empty; - - /// - /// Set of 16 key-value pairs that can be attached to an object. - /// This can be useful for storing additional information about the - /// object in a structured format. Keys can be a maximum of 64 - /// characters long and values can be a maximum of 512 characters long. - /// - [JsonPropertyName("metadata")] - public Dictionary Metadata { get; set; } = []; - - /// - /// Representa contents within a message. - /// - public sealed class ContentModel - { - /// - /// Type of content. - /// - [JsonPropertyName("type")] - public string Type { get; set; } = string.Empty; - - /// - /// Text context. - /// - [JsonPropertyName("image_file")] - public ImageContentModel? Image { get; set; } - - /// - /// Text context. - /// - [JsonPropertyName("text")] - public TextContentModel? Text { get; set; } - } - - /// - /// Text content. - /// - public sealed class ImageContentModel - { - /// - /// The image file identifier. - /// - [JsonPropertyName("file_id")] - public string FileId { get; set; } = string.Empty; - } - - /// - /// Text content. - /// - public sealed class TextContentModel - { - /// - /// The text itself. - /// - [JsonPropertyName("value")] - public string Value { get; set; } = string.Empty; - - /// - /// Any annotations on the text. - /// - [JsonPropertyName("annotations")] - public List Annotations { get; set; } = []; - } - - public sealed class TextAnnotationModel - { - /// - /// Type of content. 
- /// - [JsonPropertyName("type")] - public string Type { get; set; } = string.Empty; - - /// - /// The text of the citation-label text in the message content that can be replaced/reformatted. - /// - [JsonPropertyName("text")] - public string Text { get; set; } = string.Empty; - - /// - /// Annotation when type == "file_citation" - /// - [JsonPropertyName("file_citation")] - public TextFileCitationModel? FileCitation { get; set; } - - /// - /// Annotation when type == "file_path" - /// - [JsonPropertyName("file_path")] - public TextFilePathModel? FilePath { get; set; } - - /// - /// Start index of the citation. - /// - [JsonPropertyName("start_index")] - public int StartIndex { get; set; } - - /// - /// End index of the citation. - /// - [JsonPropertyName("end_index")] - public int EndIndex { get; set; } - } - - public sealed class TextFileCitationModel - { - /// - /// The file identifier. - /// - [JsonPropertyName("file_id")] - public string FileId { get; set; } = string.Empty; - - /// - /// The citation. - /// - [JsonPropertyName("quote")] - public string Quote { get; set; } = string.Empty; - } - - public sealed class TextFilePathModel - { - /// - /// The file identifier. - /// - [JsonPropertyName("file_id")] - public string FileId { get; set; } = string.Empty; - } -} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadModel.cs deleted file mode 100644 index 0fa72520a527..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ThreadModel.cs +++ /dev/null @@ -1,34 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// Model of Thread data returned from OpenAI -/// -internal sealed class ThreadModel -{ - /// - /// Identifier, which can be referenced in API endpoints. 
- /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// The Unix timestamp (in seconds) for when the thread was created. - /// - [JsonPropertyName("created_at")] - public int CreatedAt { get; set; } - - /// - /// Set of 16 key-value pairs that can be attached to an object. - /// This can be useful for storing additional information about the - /// object in a structured format. Keys can be a maximum of 64 - /// characters long and values can be a maximum of 512 characters long. - /// - [JsonPropertyName("metadata")] - public Dictionary Metadata { get; set; } = []; -} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs deleted file mode 100644 index 45cf1606cdd0..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ThreadRunModel.cs +++ /dev/null @@ -1,131 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 - -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// Represents an execution run on a thread. -/// -internal sealed class ThreadRunModel -{ - /// - /// Identifier, which can be referenced in API endpoints. - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the run was created. - /// - [JsonPropertyName("created_at")] - public long CreatedAt { get; set; } - - /// - /// ID of the assistant used for execution of this run. - /// - [JsonPropertyName("assistant_id")] - public string AssistantId { get; set; } = string.Empty; - - /// - /// ID of the thread that was executed on as a part of this run. 
- /// - [JsonPropertyName("thread_id")] - public string ThreadId { get; set; } = string.Empty; - - /// - /// The status of the run, which can be one of: - /// queued, in_progress, requires_action, cancelling, cancelled, failed, completed, or expired. - /// - [JsonPropertyName("status")] - public string Status { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the run was started. - /// - [JsonPropertyName("started_at")] - public long? StartedAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run will expire. - /// - [JsonPropertyName("expires_at")] - public long? ExpiresAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run was cancelled. - /// - [JsonPropertyName("cancelled_at")] - public long? CancelledAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run failed. - /// - [JsonPropertyName("failed_at")] - public long? FailedAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run was completed. - /// - [JsonPropertyName("completed_at")] - public long? CompletedAt { get; set; } - - /// - /// The last error associated with this run. Will be null if there are no errors. - /// - [JsonPropertyName("last_error")] - public ErrorModel? LastError { get; set; } - - /// - /// The model that the assistant used for this run. - /// - [JsonPropertyName("model")] - public string Model { get; set; } = string.Empty; - - /// - /// The instructions that the assistant used for this run. - /// - [JsonPropertyName("instructions")] - public string Instructions { get; set; } = string.Empty; - - /// - /// The list of tools that the assistant used for this run. - /// - [JsonPropertyName("tools")] - public List Tools { get; set; } = []; - - /// - /// The list of File IDs the assistant used for this run. - /// - [JsonPropertyName("file_ids")] - public List FileIds { get; set; } = []; - - /// - /// Set of 16 key-value pairs that can be attached to an object. 
- /// This can be useful for storing additional information about the - /// object in a structured format. Keys can be a maximum of 64 - /// characters long and values can be a maximum of 512 characters long. - /// - [JsonPropertyName("metadata")] - public Dictionary Metadata { get; set; } = []; - - /// - /// Run error information. - /// - public sealed class ErrorModel - { - /// - /// Error code. - /// - [JsonPropertyName("code")] - public string Code { get; set; } = string.Empty; - - /// - /// Error message. - /// - [JsonPropertyName("message")] - public string Message { get; set; } = string.Empty; - } -} diff --git a/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs b/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs deleted file mode 100644 index aa647c75e7ea..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ThreadRunStepModel.cs +++ /dev/null @@ -1,195 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. -#pragma warning disable CA1812 - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// list of run steps belonging to a run. -/// -internal sealed class ThreadRunStepListModel : OpenAIListModel -{ - // No specialization -} - -/// -/// Step in a run on a thread. -/// -internal sealed class ThreadRunStepModel -{ - /// - /// Identifier of the run step, which can be referenced in API endpoints. - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the run step was created. - /// - [JsonPropertyName("created_at")] - public long CreatedAt { get; set; } - - /// - /// The ID of the run to which the run step belongs. - /// - [JsonPropertyName("run_id")] - public string RunId { get; set; } = string.Empty; - - /// - /// ID of the assistant associated with the run step. 
- /// - [JsonPropertyName("assistant_id")] - public string AssistantId { get; set; } = string.Empty; - - /// - /// The ID of the thread to which the run and run step belongs. - /// - [JsonPropertyName("thread_id")] - public string ThreadId { get; set; } = string.Empty; - - /// - /// The type of run step, which can be either message_creation or tool_calls. - /// - [JsonPropertyName("type")] - public string Type { get; set; } = string.Empty; - - /// - /// The status of the run step, which can be one of: - /// in_progress, cancelled, failed, completed, or expired. - /// - [JsonPropertyName("status")] - public string Status { get; set; } = string.Empty; - - /// - /// Unix timestamp (in seconds) for when the run step was cancelled. - /// - [JsonPropertyName("cancelled_at")] - public long? CancelledAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run step completed. - /// - [JsonPropertyName("completed_at")] - public long? CompletedAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run step expired. - /// A step is considered expired if the parent run is expired. - /// - [JsonPropertyName("expired_at")] - public long? ExpiredAt { get; set; } - - /// - /// Unix timestamp (in seconds) for when the run step failed. - /// - [JsonPropertyName("failed_at")] - public long? FailedAt { get; set; } - - /// - /// The last error associated with this run step. Will be null if there are no errors. - /// - [JsonPropertyName("last_error")] - public string LastError { get; set; } = string.Empty; - - /// - /// The details of the run step. - /// - [JsonPropertyName("step_details")] - public StepDetailsModel StepDetails { get; set; } = StepDetailsModel.Empty; - - /// - /// Details of a run step. - /// - public sealed class StepDetailsModel - { - /// - /// Empty definition - /// - public static StepDetailsModel Empty = new(); - - /// - /// Type of detail. 
- /// - [JsonPropertyName("type")] - public string Type { get; set; } = string.Empty; - - /// - /// Details of the message creation by the run step. - /// - [JsonPropertyName("message_creation")] - public MessageCreationDetailsModel? MessageCreation { get; set; } - - /// - /// Details of tool calls. - /// - [JsonPropertyName("tool_calls")] - public ToolCallsDetailsModel[] ToolCalls { get; set; } = []; - } - - /// - /// Message creation details. - /// - public sealed class MessageCreationDetailsModel - { - /// - /// ID of the message that was created by this run step. - /// - [JsonPropertyName("message_id")] - public string MessageId { get; set; } = string.Empty; - } - - /// - /// Tool call details. - /// - public sealed class ToolCallsDetailsModel - { - /// - /// ID of the tool call. - /// - [JsonPropertyName("id")] - public string Id { get; set; } = string.Empty; - - /// - /// The type of tool call. - /// - [JsonPropertyName("type")] - public string Type { get; set; } = string.Empty; - - /// - /// The definition of the function that was called. - /// - [JsonPropertyName("function")] - public FunctionDetailsModel Function { get; set; } = FunctionDetailsModel.Empty; - } - - /// - /// Function call details. - /// - public sealed class FunctionDetailsModel - { - /// - /// Empty definition - /// - public static FunctionDetailsModel Empty = new(); - - /// - /// The name of the function. - /// - [JsonPropertyName("name")] - public string Name { get; set; } = string.Empty; - - /// - /// The arguments passed to the function. - /// - [JsonPropertyName("arguments")] - public string Arguments { get; set; } = string.Empty; - - /// - /// The output of the function. - /// This will be null if the outputs have not been submitted yet. 
- /// - [JsonPropertyName("output")] - public string Output { get; set; } = string.Empty; - } -} diff --git a/dotnet/src/Experimental/Agents/Models/ToolModel.cs b/dotnet/src/Experimental/Agents/Models/ToolModel.cs deleted file mode 100644 index d23dd0bf647d..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ToolModel.cs +++ /dev/null @@ -1,49 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -/// -/// Tool entry -/// -internal sealed record ToolModel -{ - /// - /// Type of tool to have at agent's disposition - /// - [JsonPropertyName("type")] - public string Type { get; init; } = string.Empty; - - /// - /// The function definition for Type = 'function'. - /// - [JsonPropertyName("function")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public FunctionModel? Function { get; init; } - - /// - /// Defines the function when ToolModel.Type == 'function'. - /// - public sealed record FunctionModel - { - /// - /// The function name. - /// - [JsonPropertyName("name")] - public string Name { get; init; } = string.Empty; - - /// - /// The function description. - /// - [JsonPropertyName("description")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Description { get; init; } - - /// - /// The function description. - /// - [JsonPropertyName("parameters")] - public OpenAIParameters Parameters { get; init; } = OpenAIParameters.Empty; - } -} diff --git a/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs b/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs deleted file mode 100644 index 3c4ef062cf70..000000000000 --- a/dotnet/src/Experimental/Agents/Models/ToolResultModel.cs +++ /dev/null @@ -1,24 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
-#pragma warning disable CA1812 -#pragma warning disable CA1852 - -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Experimental.Agents.Models; - -internal sealed class ToolResultModel -{ - private static readonly object s_placeholder = new(); - - /// - /// The tool call identifier. - /// - [JsonPropertyName("tool_call_id")] - public string CallId { get; set; } = string.Empty; - - /// - /// The tool output - /// - [JsonPropertyName("output")] - public object Output { get; set; } = s_placeholder; -} diff --git a/dotnet/src/Experimental/Agents/README.md b/dotnet/src/Experimental/Agents/README.md new file mode 100644 index 000000000000..34cd4987e58e --- /dev/null +++ b/dotnet/src/Experimental/Agents/README.md @@ -0,0 +1,19 @@ +# Notice + +The experimental agents project/package has reached end-of-life and has been removed. + +While the nuget packages continue to be available, they are not recommended for use. + +In place of this experimental framework, we recommend targeting the _Semantic Kernel Agent Framework_. 
+ +**Source:** +- https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/Agents + +**Samples:** +- https://github.com/microsoft/semantic-kernel/tree/main/dotnet/samples/GettingStartedWithAgents +- https://github.com/microsoft/semantic-kernel/tree/main/dotnet/samples/Concepts/Agents + +**Packages:** +- https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Abstractions +- https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.Core +- https://www.nuget.org/packages/Microsoft.SemanticKernel.Agents.OpenAI \ No newline at end of file diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj index a3f5a93a7013..6fdfb01ffa75 100644 --- a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj +++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/Experimental.Orchestration.Flow.IntegrationTests.csproj @@ -28,7 +28,7 @@ - + diff --git a/dotnet/src/Experimental/Process.Abstractions/AssemblyInfo.cs b/dotnet/src/Experimental/Process.Abstractions/AssemblyInfo.cs new file mode 100644 index 000000000000..ba9a54f203e2 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/AssemblyInfo.cs @@ -0,0 +1,6 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0080")] diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcess.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcess.cs new file mode 100644 index 000000000000..49115277f585 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcess.cs @@ -0,0 +1,33 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; + +/// +/// A serializable representation of a Process. +/// +public sealed record KernelProcess : KernelProcessStepInfo +{ + /// + /// The collection of Steps in the Process. + /// + public IList Steps { get; } + + /// + /// Creates a new instance of the class. + /// + /// The process state. + /// The steps of the process. + /// The edges of the process. + public KernelProcess(KernelProcessState state, IList steps, Dictionary>? edges = null) + : base(typeof(KernelProcess), state, edges ?? []) + { + Verify.NotNull(state); + Verify.NotNull(steps); + Verify.NotNullOrWhiteSpace(state.Name); + + this.Steps = []; + this.Steps.AddRange(steps); + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessEdge.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEdge.cs new file mode 100644 index 000000000000..e474df9658c6 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEdge.cs @@ -0,0 +1,31 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// A serializable representation of an edge between a source and a . +/// +public sealed class KernelProcessEdge +{ + /// + /// The unique identifier of the source Step. + /// + public string SourceStepId { get; } + + /// + /// The collection of s that are the output of the source Step. + /// + public KernelProcessFunctionTarget OutputTarget { get; } + + /// + /// Creates a new instance of the class. 
+ /// + public KernelProcessEdge(string sourceStepId, KernelProcessFunctionTarget outputTargets) + { + Verify.NotNullOrWhiteSpace(sourceStepId); + Verify.NotNull(outputTargets); + + this.SourceStepId = sourceStepId; + this.OutputTarget = outputTargets; + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessEvent.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEvent.cs new file mode 100644 index 000000000000..773f9a74f762 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEvent.cs @@ -0,0 +1,24 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// An class representing an event that can be emitted from a . This type is convertible to and from CloudEvents. +/// +public sealed record KernelProcessEvent +{ + /// + /// The unique identifier for the event. + /// + public string? Id { get; set; } + + /// + /// An optional data payload associated with the event. + /// + public object? Data { get; set; } + + /// + /// The visibility of the event. Defaults to . + /// + public KernelProcessEventVisibility Visibility { get; set; } = KernelProcessEventVisibility.Internal; +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessEventVisibility.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEventVisibility.cs new file mode 100644 index 000000000000..7d5cf5c5a677 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessEventVisibility.cs @@ -0,0 +1,21 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// An enumeration representing the visibility of a . This is used to determine +/// if the event is kept within the process it's emitted in, or exposed to external processes and systems. +/// +public enum KernelProcessEventVisibility +{ + /// + /// The event is only visible to steps within the same process. 
+ /// + Internal, + + /// + /// The event is visible inside the process as well as outside the process. This is useful + /// when the event is intended to be consumed by other processes or external systems. + /// + Public +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessFunctionTarget.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessFunctionTarget.cs new file mode 100644 index 000000000000..8a388347f35c --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessFunctionTarget.cs @@ -0,0 +1,43 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// A serializable representation of a specific parameter of a specific function of a specific Step. +/// +public record KernelProcessFunctionTarget +{ + /// + /// Creates an instance of the class. + /// + public KernelProcessFunctionTarget(string stepId, string functionName, string? parameterName = null, string? targetEventId = null) + { + Verify.NotNullOrWhiteSpace(stepId); + Verify.NotNullOrWhiteSpace(functionName); + + this.StepId = stepId; + this.FunctionName = functionName; + this.ParameterName = parameterName; + this.TargetEventId = targetEventId; + } + + /// + /// The unique identifier of the Step being targeted. + /// + public string StepId { get; init; } + + /// + /// The name of the Kernel Function to target. + /// + public string FunctionName { get; init; } + + /// + /// The name of the parameter to target. This may be null if the function has no parameters. + /// + public string? ParameterName { get; init; } + + /// + /// The unique identifier for the event to target. This may be null if the target is not a sub-process. + /// + public string?
TargetEventId { get; init; } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessMessageChannel.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessMessageChannel.cs new file mode 100644 index 000000000000..330ce7ac1093 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessMessageChannel.cs @@ -0,0 +1,18 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// An abstract class that represents a channel for emitting messages from a step. +/// +public abstract class KernelProcessMessageChannel +{ + /// + /// Emits the specified event from the step. + /// + /// The event to emit. + /// A + public abstract ValueTask EmitEventAsync(KernelProcessEvent processEvent); +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessState.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessState.cs new file mode 100644 index 000000000000..bb09b2068195 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessState.cs @@ -0,0 +1,19 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the state of a process. +/// +public sealed record KernelProcessState : KernelProcessStepState +{ + /// + /// Initializes a new instance of the class. + /// + /// The name of the associated + /// The Id of the associated + public KernelProcessState(string name, string? id = null) + : base(name, id) + { + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStep.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStep.cs new file mode 100644 index 000000000000..c3162340bb35 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStep.cs @@ -0,0 +1,30 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// Process Step. Derive from this class to create a new Step for a Process. +/// +public class KernelProcessStep +{ + /// + public virtual ValueTask ActivateAsync(KernelProcessStepState state) + { + return default; + } +} + +/// +/// Process Step. Derive from this class to create a new Step with user-defined state of type TState for a Process. +/// +/// An instance of TState used for user-defined state. +public class KernelProcessStep : KernelProcessStep where TState : class, new() +{ + /// + public virtual ValueTask ActivateAsync(KernelProcessStepState state) + { + return default; + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs new file mode 100644 index 000000000000..89d0b3148bcc --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepContext.cs @@ -0,0 +1,32 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides step related functionality for Kernel Functions running in a step. +/// +public sealed class KernelProcessStepContext +{ + private readonly KernelProcessMessageChannel _stepMessageChannel; + + /// + /// Initializes a new instance of the class. + /// + /// An instance of . + public KernelProcessStepContext(KernelProcessMessageChannel channel) + { + this._stepMessageChannel = channel; + } + + /// + /// Emit an event from the current step. 
+ /// + /// An instance of to be emitted from the + /// A + public ValueTask EmitEventAsync(KernelProcessEvent processEvent) + { + return this._stepMessageChannel.EmitEventAsync(processEvent); + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepInfo.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepInfo.cs new file mode 100644 index 000000000000..7273ccf875ea --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepInfo.cs @@ -0,0 +1,58 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.SemanticKernel; + +/// +/// Contains information about a Step in a Process including its state and edges. +/// +public record KernelProcessStepInfo +{ + private KernelProcessStepState _state; + + /// + /// A mapping of output edges from the Step using the . + /// + private readonly Dictionary> _outputEdges; + + /// + /// The type of the inner step. + /// + public Type InnerStepType { get; } + + /// + /// The state of the Step. + /// + public KernelProcessStepState State + { + get => this._state; + init + { + Verify.NotNull(value); + this._state = value; + } + } + + /// + /// A read-only dictionary of output edges from the Step. + /// + public IReadOnlyDictionary> Edges => + this._outputEdges.ToDictionary(kvp => kvp.Key, kvp => (IReadOnlyCollection)kvp.Value.AsReadOnly()); + + /// + /// Initializes a new instance of the class.
+ /// + public KernelProcessStepInfo(Type innerStepType, KernelProcessStepState state, Dictionary> edges) + { + Verify.NotNull(innerStepType); + Verify.NotNull(edges); + Verify.NotNull(state); + + this.InnerStepType = innerStepType; + this._outputEdges = edges; + this._state = state; + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepState.cs b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepState.cs new file mode 100644 index 000000000000..fb90d70f8d2a --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/KernelProcessStepState.cs @@ -0,0 +1,60 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the state of an individual step in a process. +/// +public record KernelProcessStepState +{ + /// + /// The identifier of the Step which is required to be unique within an instance of a Process. + /// This may be null until a process containing this step has been invoked. + /// + public string? Id { get; init; } + + /// + /// The name of the Step. This is intended to be human readable and is not required to be unique. If + /// not provided, the name will be derived from the step's .NET type. + /// + public string Name { get; init; } + + /// + /// Initializes a new instance of the class. + /// + /// The name of the associated + /// The Id of the associated + public KernelProcessStepState(string name, string? id = null) + { + Verify.NotNullOrWhiteSpace(name); + + this.Id = id; + this.Name = name; + } +} + +/// + /// Represents the state of an individual step in a process that includes a user-defined state object. +/// +/// The type of the user-defined state. +public sealed record KernelProcessStepState : KernelProcessStepState where TState : class, new() +{ + /// + /// The user-defined state object associated with the Step. + /// + public TState? State { get; set; } + + /// + /// Initializes a new instance of the class.
+ /// + /// The name of the associated + /// The Id of the associated + public KernelProcessStepState(string name, string? id = null) + : base(name, id) + { + Verify.NotNullOrWhiteSpace(name); + + this.Id = id; + this.Name = name; + } +} diff --git a/dotnet/src/Experimental/Process.Abstractions/Process.Abstractions.csproj b/dotnet/src/Experimental/Process.Abstractions/Process.Abstractions.csproj new file mode 100644 index 000000000000..b12e68c67e71 --- /dev/null +++ b/dotnet/src/Experimental/Process.Abstractions/Process.Abstractions.csproj @@ -0,0 +1,30 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Process.Abstractions + Microsoft.SemanticKernel.Process + net8.0;netstandard2.0 + false + alpha + + + + + + + + Semantic Kernel Process - Abstractions + Semantic Kernel Process abstractions. This package is automatically installed by Semantic Kernel Process packages if needed. + + + + + + + + + + + + diff --git a/dotnet/src/Experimental/Process.Core/AssemblyInfo.cs b/dotnet/src/Experimental/Process.Core/AssemblyInfo.cs new file mode 100644 index 000000000000..ba9a54f203e2 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/AssemblyInfo.cs @@ -0,0 +1,6 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0080")] diff --git a/dotnet/src/Experimental/Process.Core/Extensions/ProcessTypeExtensions.cs b/dotnet/src/Experimental/Process.Core/Extensions/ProcessTypeExtensions.cs new file mode 100644 index 000000000000..e84f691a7d95 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/Extensions/ProcessTypeExtensions.cs @@ -0,0 +1,39 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel.Process; + +/// +/// Provides extension methods for instances related to process steps. +/// +internal static class ProcessTypeExtensions +{ + /// + /// The generic state type for a process step. 
+ /// + private static readonly Type s_genericType = typeof(KernelProcessStep<>); + + /// + /// Attempts to find an instance of ']]> within the provided types hierarchy. + /// + /// The type to examine. + /// The matching type if found, otherwise null. + /// True if a match is found, false otherwise. + public static bool TryGetSubtypeOfStatefulStep(this Type? type, out Type? genericStateType) + { + while (type != null && type != typeof(object)) + { + if (type.IsGenericType && type.GetGenericTypeDefinition() == s_genericType) + { + genericStateType = type; + return true; + } + + type = type.BaseType; + } + + genericStateType = null; + return false; + } +} diff --git a/dotnet/src/Experimental/Process.Core/Internal/EndStep.cs b/dotnet/src/Experimental/Process.Core/Internal/EndStep.cs new file mode 100644 index 000000000000..287a2b488f1d --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/Internal/EndStep.cs @@ -0,0 +1,48 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; + +/// +/// EndStep is a special purpose step that is used to trigger a process to stop. It is the last step in a process. +/// +internal sealed class EndStep : ProcessStepBuilder +{ + private const string EndStepValue = "END"; + + /// + /// The name of the end step. + /// + public const string EndStepName = EndStepValue; + + /// + /// The event ID for stopping a process. + /// + public const string EndStepId = EndStepValue; + + /// + /// The static instance of the class. + /// + public static EndStep Instance { get; } = new EndStep(); + + /// + /// Represents the end of a process. + /// + internal EndStep() + : base(EndStepName) + { + } + + internal override Dictionary GetFunctionMetadataMap() + { + // The end step has no functions. + return []; + } + + internal override KernelProcessStepInfo BuildStep() + { + // The end step has no state. 
+ return new KernelProcessStepInfo(typeof(KernelProcessStepState), new KernelProcessStepState(EndStepName), []); + } +} diff --git a/dotnet/src/Experimental/Process.Core/Process.Core.csproj b/dotnet/src/Experimental/Process.Core/Process.Core.csproj new file mode 100644 index 000000000000..b872069bc6a1 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/Process.Core.csproj @@ -0,0 +1,31 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Process.Core + Microsoft.SemanticKernel.Process + net8.0;netstandard2.0 + false + alpha + + + + + + + + Semantic Kernel Process - Core + Semantic Kernel Process core. This package is automatically installed by Semantic Kernel Process packages if needed. + + + + + + + + + + + + + diff --git a/dotnet/src/Experimental/Process.Core/ProcessBuilder.cs b/dotnet/src/Experimental/Process.Core/ProcessBuilder.cs new file mode 100644 index 000000000000..fa509daebfd5 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/ProcessBuilder.cs @@ -0,0 +1,190 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides functionality for incrementally defining a process. +/// +public sealed class ProcessBuilder : ProcessStepBuilder +{ + /// The collection of steps within this process. + private readonly List _steps = []; + + /// The collection of entry steps within this process. + private readonly List _entrySteps = []; + + /// Maps external event Ids to the target entry step for the event. + private readonly Dictionary _externalEventTargetMap = []; + + /// + /// A boolean indicating if the current process is a step within another process. + /// + internal bool HasParentProcess { get; set; } + + /// + /// Used to resolve the target function and parameter for a given optional function name and parameter name. 
+ /// This is used to simplify the process of creating a by making it possible + /// to infer the function and/or parameter names from the function metadata if only one option exists. + /// + /// The name of the function. May be null if only one function exists on the step. + /// The name of the parameter. May be null if only one parameter exists on the function. + /// A valid instance of for this step. + /// + internal override KernelProcessFunctionTarget ResolveFunctionTarget(string? functionName, string? parameterName) + { + // Try to resolve the function target on each of the registered entry points. + var targets = new List(); + foreach (var step in this._entrySteps) + { + try + { + targets.Add(step.ResolveFunctionTarget(functionName, parameterName)); + } + catch (KernelException) + { + // If the function is not found on the source step, then we can ignore it. + } + } + + // If no targets were found or if multiple targets were found, throw an exception. + if (targets.Count == 0) + { + throw new InvalidOperationException($"No targets found for the specified function and parameter '{functionName}.{parameterName}'."); + } + else if (targets.Count > 1) + { + throw new InvalidOperationException($"Multiple targets found for the specified function and parameter '{functionName}.{parameterName}'."); + } + + return targets[0]; + } + + /// + internal override void LinkTo(string eventId, ProcessStepEdgeBuilder edgeBuilder) + { + Verify.NotNull(edgeBuilder?.Source, nameof(edgeBuilder.Source)); + Verify.NotNull(edgeBuilder?.Target, nameof(edgeBuilder.Target)); + + // Keep track of the entry point steps + this._entrySteps.Add(edgeBuilder.Source); + this._externalEventTargetMap[eventId] = edgeBuilder.Target; + base.LinkTo(eventId, edgeBuilder); + } + + /// + internal override Dictionary GetFunctionMetadataMap() + { + // The process has no kernel functions of its own, but it does expose the functions from its entry steps. 
+ // Merge the function metadata map from each of the entry steps. + return this._entrySteps.SelectMany(step => step.GetFunctionMetadataMap()) + .ToDictionary(pair => pair.Key, pair => pair.Value); + } + + /// + /// Builds the step. + /// + /// + internal override KernelProcessStepInfo BuildStep() + { + // The process is a step so we can return the step info directly. + return this.Build(); + } + + #region Public Interface + + /// + /// A read-only collection of steps in the process. + /// + public IReadOnlyList Steps => this._steps.AsReadOnly(); + + /// + /// Adds a step to the process. + /// + /// The step Type. + /// The name of the step. This parameter is optional. + /// An instance of + public ProcessStepBuilder AddStepFromType(string? name = null) where TStep : KernelProcessStep + { + var stepBuilder = new ProcessStepBuilder(name); + this._steps.Add(stepBuilder); + + return stepBuilder; + } + + /// + /// Adds a sub process to the process. + /// + /// The process to add as a step. + /// An instance of + public ProcessBuilder AddStepFromProcess(ProcessBuilder kernelProcess) + { + kernelProcess.HasParentProcess = true; + this._steps.Add(kernelProcess); + return kernelProcess; + } + + /// + /// Provides an instance of for defining an edge to a + /// step inside the process for a given external event. + /// + /// The Id of the external event. + /// An instance of + public ProcessEdgeBuilder OnInputEvent(string eventId) + { + return new ProcessEdgeBuilder(this, eventId); + } + + /// + /// Retrieves the target for a given external event. The step associated with the target is the process itself (this). 
+ /// + /// The Id of the event + /// An instance of + /// + public ProcessFunctionTargetBuilder WhereInputEventIs(string eventId) + { + Verify.NotNullOrWhiteSpace(eventId); + + if (!this._externalEventTargetMap.TryGetValue(eventId, out var target)) + { + throw new KernelException($"The process named '{this.Name}' does not expose an event with Id '{eventId}'."); + } + + // Targets for external events on a process should be scoped to the process itself rather than the step inside the process. + var processTarget = target with { Step = this, TargetEventId = eventId }; + return processTarget; + } + + /// + /// Builds the process. + /// + /// An instance of + /// + public KernelProcess Build() + { + // Build the edges first + var builtEdges = this.Edges.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Select(e => e.Build()).ToList()); + + // Build the steps + var builtSteps = this._steps.Select(step => step.BuildStep()).ToList(); + + // Create the process + var state = new KernelProcessState(this.Name, id: this.HasParentProcess ? this.Id : null); + var process = new KernelProcess(state, builtSteps, builtEdges); + return process; + } + + /// + /// Initializes a new instance of the class. + /// + /// The name of the process. This is required. + public ProcessBuilder(string name) + : base(name) + { + } + + #endregion +} diff --git a/dotnet/src/Experimental/Process.Core/ProcessEdgeBuilder.cs b/dotnet/src/Experimental/Process.Core/ProcessEdgeBuilder.cs new file mode 100644 index 000000000000..64441ad29661 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/ProcessEdgeBuilder.cs @@ -0,0 +1,42 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Provides functionality for incrementally defining a process edge. +/// +public sealed class ProcessEdgeBuilder +{ + internal ProcessFunctionTargetBuilder? Target { get; set; } + + /// + /// The event Id that the edge fires on. 
+ /// + internal string EventId { get; } + + /// + /// The source step of the edge. + /// + internal ProcessStepBuilder Source { get; } + + /// + /// Initializes a new instance of the class. + /// + /// The source step. + /// The Id of the event. + internal ProcessEdgeBuilder(ProcessBuilder source, string eventId) + { + this.Source = source; + this.EventId = eventId; + } + + /// + /// Sends the output of the source step to the specified target when the associated event fires. + /// + public void SendEventTo(ProcessFunctionTargetBuilder target) + { + this.Target = target; + ProcessStepEdgeBuilder edgeBuilder = new(this.Source, this.EventId) { Target = this.Target }; + this.Source.LinkTo(this.EventId, edgeBuilder); + } +} diff --git a/dotnet/src/Experimental/Process.Core/ProcessFunctionTargetBuilder.cs b/dotnet/src/Experimental/Process.Core/ProcessFunctionTargetBuilder.cs new file mode 100644 index 000000000000..6c2d29f67cc9 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/ProcessFunctionTargetBuilder.cs @@ -0,0 +1,66 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel; + +/// +/// Provides functionality for incrementally defining a process function target. +/// +public sealed record ProcessFunctionTargetBuilder +{ + /// + /// Initializes a new instance of the class. + /// + /// The step to target. + /// The function to target. + /// The parameter to target. + public ProcessFunctionTargetBuilder(ProcessStepBuilder step, string? functionName = null, string? parameterName = null) + { + Verify.NotNull(step); + this.Step = step; + + // If the step is an EndStep, we don't need to resolve the function target. + if (step is EndStep) + { + this.FunctionName = "END"; + this.ParameterName = null; + return; + } + + // Make sure the function target is valid. 
+ var target = step.ResolveFunctionTarget(functionName, parameterName); + Verify.NotNull(target); + + this.FunctionName = target.FunctionName!; + this.ParameterName = target.ParameterName; + } + + /// + /// Builds the function target. + /// + /// An instance of + internal KernelProcessFunctionTarget Build() + { + Verify.NotNull(this.Step.Id); + return new KernelProcessFunctionTarget(this.Step.Id, this.FunctionName, this.ParameterName, this.TargetEventId); + } + + /// + /// An instance of representing the target Step. + /// + public ProcessStepBuilder Step { get; init; } + + /// + /// The name of the function to target. + /// + public string FunctionName { get; init; } + + /// + /// The name of the parameter to target. This may be null if the function has no parameters. + /// + public string? ParameterName { get; init; } + + /// + /// The unique identifier for the event to target. This may be null if the target is not a sub-process. + /// + public string? TargetEventId { get; init; } +} diff --git a/dotnet/src/Experimental/Process.Core/ProcessStepBuilder.cs b/dotnet/src/Experimental/Process.Core/ProcessStepBuilder.cs new file mode 100644 index 000000000000..587f2a1c1693 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/ProcessStepBuilder.cs @@ -0,0 +1,239 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Process; + +namespace Microsoft.SemanticKernel; + +/// +/// An abstract class that provides functionality for incrementally defining a process step and linking it to other steps within a Process. +/// +public abstract class ProcessStepBuilder +{ + #region Public Interface + + /// + /// The unique identifier for the step. This may be null until the step is run within a process. + /// + public string? Id { get; } + + /// + /// The name of the step. This is intended to be a human-readable name and is not required to be unique. 
+ /// + public string Name { get; } + + /// + /// Define the behavior of the step when the event with the specified Id is fired. + /// + /// The Id of the event of interest. + /// An instance of . + public virtual ProcessStepEdgeBuilder OnEvent(string eventId) + { + // scope the event to this instance of this step + var scopedEventId = this.GetScopedEventId(eventId); + return new ProcessStepEdgeBuilder(this, scopedEventId); + } + + /// + /// Define the behavior of the step when the specified function has been successfully invoked. + /// + /// The name of the function of interest. + /// An instance of . + public virtual ProcessStepEdgeBuilder OnFunctionResult(string functionName) + { + return this.OnEvent($"{functionName}.OnResult"); + } + + #endregion + + /// The namespace for events that are scoped to this step. + private readonly string _eventNamespace; + + /// + /// A mapping of function names to the functions themselves. + /// + internal Dictionary FunctionsDict { get; set; } + + /// + /// A mapping of event Ids to the edges that are triggered by those events. + /// + internal Dictionary> Edges { get; } + + /// + /// Builds the step. + /// + /// an instance of . + internal abstract KernelProcessStepInfo BuildStep(); + + /// + /// Links the output of the current step to the an input of another step via the specified event type. + /// + /// The Id of the event. + /// The targeted function. + internal virtual void LinkTo(string eventId, ProcessStepEdgeBuilder edgeBuilder) + { + if (!this.Edges.TryGetValue(eventId, out List? edges) || edges == null) + { + edges = []; + this.Edges[eventId] = edges; + } + + edges.Add(edgeBuilder); + } + + /// + /// Used to resolve the target function and parameter for a given optional function name and parameter name. + /// This is used to simplify the process of creating a by making it possible + /// to infer the function and/or parameter names from the function metadata if only one option exists. 
+ /// + /// The name of the function. May be null if only one function exists on the step. + /// The name of the parameter. May be null if only one parameter exists on the function. + /// A valid instance of for this step. + /// + internal virtual KernelProcessFunctionTarget ResolveFunctionTarget(string? functionName, string? parameterName) + { + string? verifiedFunctionName = functionName; + string? verifiedParameterName = parameterName; + + if (this.FunctionsDict.Count == 0) + { + throw new KernelException($"The target step {this.Name} has no functions."); + } + + // If the function name is null or whitespace, then there can only one function on the step + if (string.IsNullOrWhiteSpace(verifiedFunctionName)) + { + if (this.FunctionsDict.Count > 1) + { + throw new KernelException("The target step has more than one function, so a function name must be provided."); + } + + verifiedFunctionName = this.FunctionsDict.Keys.First(); + } + + // Verify that the target function exists + if (!this.FunctionsDict.TryGetValue(verifiedFunctionName!, out var kernelFunctionMetadata) || kernelFunctionMetadata is null) + { + throw new KernelException($"The function {functionName} does not exist on step {this.Name}"); + } + + // If the parameter name is null or whitespace, then the function must have 0 or 1 parameters + if (string.IsNullOrWhiteSpace(verifiedParameterName)) + { + var undeterminedParameters = kernelFunctionMetadata.Parameters.Where(p => p.ParameterType != typeof(KernelProcessStepContext)).ToList(); + + if (undeterminedParameters.Count > 1) + { + throw new KernelException($"The function {functionName} on step {this.Name} has more than one parameter, so a parameter name must be provided."); + } + + // We can infer the parameter name from the function metadata + if (undeterminedParameters.Count == 1) + { + parameterName = undeterminedParameters[0].Name; + verifiedParameterName = parameterName; + } + } + + Verify.NotNull(verifiedFunctionName); + + return new 
KernelProcessFunctionTarget( + stepId: this.Id!, + functionName: verifiedFunctionName, + parameterName: verifiedParameterName + ); + } + + /// + /// Loads a mapping of function names to the associated functions metadata. + /// + /// A where TKey is and TValue is + internal abstract Dictionary GetFunctionMetadataMap(); + + /// + /// Given an event Id, returns a scoped event Id that is unique to this instance of the step. + /// + /// The Id of the event. + /// An Id that represents the provided event Id scoped to this step instance. + protected string GetScopedEventId(string eventId) + { + // Scope the event to this instance of this step by prefixing the event Id with the step's namespace. + return $"{this._eventNamespace}.{eventId}"; + } + + /// + /// Initializes a new instance of the class. + /// + /// The name of the step. + protected ProcessStepBuilder(string name) + { + this.Name ??= name; + Verify.NotNullOrWhiteSpace(name); + + this.FunctionsDict = []; + this.Id = Guid.NewGuid().ToString("n"); + this._eventNamespace = $"{this.Name}_{this.Id}"; + this.Edges = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } +} + +/// +/// Provides functionality for incrementally defining a process step. +/// +public sealed class ProcessStepBuilder : ProcessStepBuilder where TStep : KernelProcessStep +{ + /// + /// Creates a new instance of the class. If a name is not provided, the name will be derived from the type of the step. + /// + public ProcessStepBuilder(string? name = null) + : base(name ?? typeof(TStep).Name) + { + this.FunctionsDict = this.GetFunctionMetadataMap(); + } + + /// + /// Builds the step. + /// + /// An instance of + internal override KernelProcessStepInfo BuildStep() + { + KernelProcessStepState? stateObject = null; + + if (typeof(TStep).TryGetSubtypeOfStatefulStep(out Type? 
genericStepType) && genericStepType is not null) + { + // The step is a subclass of KernelProcessStep<>, so we need to extract the generic type argument + // and create an instance of the corresponding KernelProcessStepState<>. + var userStateType = genericStepType.GetGenericArguments()[0]; + Verify.NotNull(userStateType); + + var stateType = typeof(KernelProcessStepState<>).MakeGenericType(userStateType); + Verify.NotNull(stateType); + + stateObject = (KernelProcessStepState?)Activator.CreateInstance(stateType, this.Name, this.Id); + } + else + { + // The step is a KernelProcessStep with no user-defined state, so we can use the base KernelProcessStepState. + stateObject = new KernelProcessStepState(this.Name, this.Id); + } + + Verify.NotNull(stateObject); + + // Build the edges first + var builtEdges = this.Edges.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.Select(e => e.Build()).ToList()); + + // Then build the step with the edges and state. + var builtStep = new KernelProcessStepInfo(typeof(TStep), stateObject, builtEdges); + return builtStep; + } + + /// + internal override Dictionary GetFunctionMetadataMap() + { + // TODO: Should not have to create a new instance of the step to get the functions metadata. + var functions = KernelPluginFactory.CreateFromType(); + return functions.ToDictionary(f => f.Name, f => f.Metadata); + } +} diff --git a/dotnet/src/Experimental/Process.Core/ProcessStepEdgeBuilder.cs b/dotnet/src/Experimental/Process.Core/ProcessStepEdgeBuilder.cs new file mode 100644 index 000000000000..1d1c17b698d0 --- /dev/null +++ b/dotnet/src/Experimental/Process.Core/ProcessStepEdgeBuilder.cs @@ -0,0 +1,78 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides functionality for incrementally defining a process edge. +/// +public sealed class ProcessStepEdgeBuilder +{ + internal ProcessFunctionTargetBuilder? 
Target { get; set; } + + /// + /// The event Id that the edge fires on. + /// + internal string EventId { get; } + + /// + /// The source step of the edge. + /// + internal ProcessStepBuilder Source { get; init; } + + /// + /// Initializes a new instance of the class. + /// + /// The source step. + /// The Id of the event. + internal ProcessStepEdgeBuilder(ProcessStepBuilder source, string eventId) + { + Verify.NotNull(source); + Verify.NotNullOrWhiteSpace(eventId); + + this.Source = source; + this.EventId = eventId; + } + + /// + /// Builds the edge. + /// + internal KernelProcessEdge Build() + { + Verify.NotNull(this.Source?.Id); + Verify.NotNull(this.Target); + + return new KernelProcessEdge(this.Source.Id, this.Target.Build()); + } + + /// + /// Signals that the output of the source step should be sent to the specified target when the associated event fires. + /// + /// The output target. + public void SendEventTo(ProcessFunctionTargetBuilder target) + { + if (this.Target is not null) + { + throw new InvalidOperationException("An output target has already been set."); + } + + this.Target = target; + this.Source.LinkTo(this.EventId, this); + } + + /// + /// Signals that the process should be stopped. + /// + public void StopProcess() + { + if (this.Target is not null) + { + throw new InvalidOperationException("An output target has already been set."); + } + + var outputTarget = new ProcessFunctionTargetBuilder(EndStep.Instance); + this.Target = outputTarget; + this.Source.LinkTo(EndStep.EndStepName, this); + } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/AssemblyInfo.cs b/dotnet/src/Experimental/Process.LocalRuntime/AssemblyInfo.cs new file mode 100644 index 000000000000..ba9a54f203e2 --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/AssemblyInfo.cs @@ -0,0 +1,6 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. 
+[assembly: Experimental("SKEXP0080")] diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalEvent.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalEvent.cs new file mode 100644 index 000000000000..f0dc967f424a --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalEvent.cs @@ -0,0 +1,53 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.SemanticKernel.Process; + +/// +/// A wrapper around that helps to manage the namespace of the event. +/// +internal record LocalEvent +{ + /// + /// The inner that this wraps. + /// + private KernelProcessEvent InnerEvent { get; init; } + + /// + /// The namespace of the event. + /// + internal string? Namespace { get; init; } + + /// + /// The Id of the event. + /// + internal string Id => $"{this.Namespace}.{this.InnerEvent.Id}"; + + /// + /// The data of the event. + /// + internal object? Data => this.InnerEvent.Data; + + /// + /// The visibility of the event. + /// + internal KernelProcessEventVisibility Visibility => this.InnerEvent.Visibility; + + /// + /// Initializes a new instance of the class. + /// + /// The namespace of the event. + /// The instance of that this came from. + internal LocalEvent(string? eventNamespace, KernelProcessEvent innerEvent) + { + this.Namespace = eventNamespace; + this.InnerEvent = innerEvent; + } + + /// + /// Creates a new from a . + /// + /// The + /// The namespace of the event. + /// An instance of + internal static LocalEvent FromKernelProcessEvent(KernelProcessEvent kernelProcessEvent, string Namespace) => new(Namespace, kernelProcessEvent); +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs new file mode 100644 index 000000000000..58fe08611b7c --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalKernelProcessContext.cs @@ -0,0 +1,59 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// Provides context and actions on a process that is running locally. +/// +public sealed class LocalKernelProcessContext : IDisposable +{ + private readonly LocalProcess _localProcess; + private readonly Kernel _kernel; + + internal LocalKernelProcessContext(KernelProcess process, Kernel kernel) + { + Verify.NotNull(process); + Verify.NotNullOrWhiteSpace(process.State?.Name); + Verify.NotNull(kernel); + + this._kernel = kernel; + this._localProcess = new LocalProcess( + process, + kernel: kernel, + parentProcessId: null, + loggerFactory: null); + } + + internal async Task StartWithEventAsync(KernelProcessEvent? initialEvent, Kernel? kernel = null) + { + await this._localProcess.RunOnceAsync(initialEvent).ConfigureAwait(false); + } + + /// + /// Sends a message to the process. + /// + /// The event to sent to the process. + /// A + public async Task SendEventAsync(KernelProcessEvent processEvent) => + await this._localProcess.SendMessageAsync(processEvent).ConfigureAwait(false); + + /// + /// Stops the process. + /// + /// A + public async Task StopAsync() => await this._localProcess.StopAsync().ConfigureAwait(false); + + /// + /// Gets a snapshot of the current state of the process. + /// + /// A where T is + public async Task GetStateAsync() => await this._localProcess.GetProcessInfoAsync().ConfigureAwait(false); + + /// + /// Disposes of the resources used by the process. + /// + public void Dispose() => this._localProcess?.Dispose(); +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalMessage.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalMessage.cs new file mode 100644 index 000000000000..587f07deb6e4 --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalMessage.cs @@ -0,0 +1,47 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel; +/// +/// Represents a local message used in the local runtime. +/// +/// +/// Initializes a new instance of the class. +/// +/// The source identifier of the message. +/// The destination identifier of the message. +/// The name of the function associated with the message. +/// The dictionary of values associated with the message. +internal record LocalMessage(string sourceId, string destinationId, string functionName, Dictionary values) +{ + /// + /// Gets the source identifier of the message. + /// + public string SourceId { get; } = sourceId; + + /// + /// Gets the destination identifier of the message. + /// + public string DestinationId { get; } = destinationId; + + /// + /// Gets the name of the function associated with the message. + /// + public string FunctionName { get; } = functionName; + + /// + /// Gets the dictionary of values associated with the message. + /// + public Dictionary Values { get; } = values; + + /// + /// The Id of the target event. This may be null if the message is not targeting a sub-process. + /// + public string? TargetEventId { get; init; } + + /// + /// The data associated with the target event. This may be null if the message is not targeting a sub-process. + /// + public object? TargetEventData { get; init; } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalMessageFactory.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalMessageFactory.cs new file mode 100644 index 000000000000..0f11354b7e7c --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalMessageFactory.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Process; + +/// +/// A factory class for creating instances. +/// +internal static class LocalMessageFactory +{ + /// + /// Creates a new instance from a and a data object. 
+ /// + /// An instance of + /// A data object. + /// An instance of + internal static LocalMessage CreateFromEdge(KernelProcessEdge edge, object? data) + { + var target = edge.OutputTarget; + Dictionary parameterValue = []; + if (!string.IsNullOrWhiteSpace(target.ParameterName)) + { + parameterValue.Add(target.ParameterName!, data); + } + + LocalMessage newMessage = new(edge.SourceStepId, target.StepId, target.FunctionName, parameterValue) + { + TargetEventId = target.TargetEventId, + TargetEventData = data + }; + + return newMessage; + } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs new file mode 100644 index 000000000000..757029f1ced4 --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcess.cs @@ -0,0 +1,386 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Process; +using Microsoft.VisualStudio.Threading; + +namespace Microsoft.SemanticKernel; + +internal sealed class LocalProcess : LocalStep, IDisposable +{ + private const string EndProcessId = "END"; + private readonly JoinableTaskFactory _joinableTaskFactory; + private readonly JoinableTaskContext _joinableTaskContext; + private readonly Channel _externalEventChannel; + private readonly Lazy _initializeTask; + + internal readonly List _stepsInfos; + internal readonly List _steps = []; + internal readonly KernelProcess _process; + internal readonly Kernel _kernel; + + private readonly ILogger _logger; + private JoinableTask? _processTask; + private CancellationTokenSource? _processCancelSource; + + /// + /// Initializes a new instance of the class. + /// + /// The instance. + /// An instance of + /// Optional. 
The Id of the parent process if one exists, otherwise null. + /// Optional. A . + internal LocalProcess(KernelProcess process, Kernel kernel, string? parentProcessId = null, ILoggerFactory? loggerFactory = null) + : base(process, kernel, parentProcessId, loggerFactory) + { + Verify.NotNull(process); + Verify.NotNull(process.Steps); + Verify.NotNull(kernel); + + this._stepsInfos = new List(process.Steps); + this._kernel = kernel; + this._process = process; + this._initializeTask = new Lazy(this.InitializeProcessAsync); + this._externalEventChannel = Channel.CreateUnbounded(); + this._joinableTaskContext = new JoinableTaskContext(); + this._joinableTaskFactory = new JoinableTaskFactory(this._joinableTaskContext); + this._logger = this.LoggerFactory?.CreateLogger(this.Name) ?? new NullLogger(); + } + + /// + /// Starts the process with an initial event and an optional kernel. + /// + /// The instance to use within the running process. + /// Indicates if the process should wait for external events after it's finished processing. + /// + internal async Task StartAsync(Kernel? kernel = null, bool keepAlive = true) + { + // Lazy one-time initialization of the process before staring it. + await this._initializeTask.Value.ConfigureAwait(false); + + this._processCancelSource = new CancellationTokenSource(); + this._processTask = this._joinableTaskFactory.RunAsync(() + => this.Internal_ExecuteAsync(kernel, keepAlive: keepAlive, cancellationToken: this._processCancelSource.Token)); + } + + /// + /// Starts the process with an initial event and then waits for the process to finish. In this case the process will not + /// keep alive waiting for external events after the internal messages have stopped. + /// + /// Required. The to start the process with. + /// Optional. A to use when executing the process. + /// A + internal async Task RunOnceAsync(KernelProcessEvent? processEvent, Kernel? 
kernel = null) + { + Verify.NotNull(processEvent); + await this._externalEventChannel.Writer.WriteAsync(processEvent).ConfigureAwait(false); + await this.StartAsync(kernel, keepAlive: false).ConfigureAwait(false); + await this._processTask!.JoinAsync().ConfigureAwait(false); + } + + /// + /// Stops a running process. This will cancel the process and wait for it to complete before returning. + /// + /// A + internal async Task StopAsync() + { + if (this._processTask is null || this._processCancelSource is null || this._processTask.IsCompleted) + { + return; + } + + // Cancel the process and wait for it to complete. + this._processCancelSource.Cancel(); + + try + { + await this._processTask; + } + catch (OperationCanceledException) + { + // The task was cancelled, so we can ignore this exception. + } + finally + { + this._processCancelSource.Dispose(); + } + } + + /// + /// Sends a message to the process. This does not start the process if it's not already running, in + /// this case the message will remain queued until the process is started. + /// + /// Required. The to start the process with. + /// Optional. A to use when executing the process. + /// A + internal async Task SendMessageAsync(KernelProcessEvent processEvent, Kernel? kernel = null) + { + Verify.NotNull(processEvent); + await this._externalEventChannel.Writer.WriteAsync(processEvent).ConfigureAwait(false); + } + + /// + /// Gets the process information. + /// + /// An instance of + internal async Task GetProcessInfoAsync() + { + return await this.ToKernelProcessAsync().ConfigureAwait(false); + } + + /// + /// Handles a that has been sent to the process. This happens only in the case + /// of a process (this one) running as a step within another process (this one's parent). In this case the + /// entire sub-process should be executed within a single superstep. + /// + /// The message to process. 
+ /// A + /// + internal override async Task HandleMessageAsync(LocalMessage message) + { + if (string.IsNullOrWhiteSpace(message.TargetEventId)) + { + string errorMessage = "Internal Process Error: The target event id must be specified when sending a message to a step."; + this._logger.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + string eventId = message.TargetEventId!; + if (this._outputEdges!.TryGetValue(eventId, out List? edges) && edges is not null) + { + foreach (var edge in edges) + { + // Create the external event that will be used to start the nested process. Since this event came + // from outside this processes, we set the visibility to internal so that it's not emitted back out again. + var nestedEvent = new KernelProcessEvent() { Id = eventId, Data = message.TargetEventData, Visibility = KernelProcessEventVisibility.Internal }; + + // Run the nested process completely within a single superstep. + await this.RunOnceAsync(nestedEvent, this._kernel).ConfigureAwait(false); + } + } + } + + #region Private Methods + + /// + /// Loads the process and initializes the steps. Once this is complete the process can be started. + /// + /// A + private ValueTask InitializeProcessAsync() + { + // Initialize the input and output edges for the process + this._outputEdges = this._process.Edges.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.ToList()); + + // Initialize the steps within this process + foreach (var step in this._stepsInfos) + { + LocalStep? localStep = null; + + // The current step should already have a name. + Verify.NotNull(step.State?.Name); + + if (step is KernelProcess kernelStep) + { + // The process will only have an Id if its already been executed. 
+ if (string.IsNullOrWhiteSpace(kernelStep.State.Id)) + { + kernelStep = kernelStep with { State = kernelStep.State with { Id = Guid.NewGuid().ToString() } }; + } + + var process = new LocalProcess( + process: kernelStep, + kernel: this._kernel, + parentProcessId: this.Id, + loggerFactory: this.LoggerFactory); + + //await process.StartAsync(kernel: this._kernel, keepAlive: true).ConfigureAwait(false); + localStep = process; + } + else + { + // The current step should already have an Id. + Verify.NotNull(step.State?.Id); + + localStep = new LocalStep( + stepInfo: step, + kernel: this._kernel, + parentProcessId: this.Id, + loggerFactory: this.LoggerFactory); + } + + this._steps.Add(localStep); + } + + return default; + } + + /// + /// Initializes this process as a step within another process. + /// + /// A + /// + protected override ValueTask InitializeStepAsync() + { + // The process does not need any further initialization as it's already been initialized. + // Override the base method to prevent it from being called. + return default; + } + + private async Task Internal_ExecuteAsync(Kernel? kernel = null, int maxSupersteps = 100, bool keepAlive = true, CancellationToken cancellationToken = default) + { + Kernel localKernel = kernel ?? this._kernel; + Queue messageChannel = new(); + + try + { + // Run the Pregel algorithm until there are no more messages being sent. + LocalStep? finalStep = null; + for (int superstep = 0; superstep < maxSupersteps; superstep++) + { + // Check for external events + this.EnqueueExternalMessages(messageChannel); + + // Get all of the messages that have been sent to the steps within the process and queue them up for processing. + foreach (var step in this._steps) + { + this.EnqueueStepMessages(step, messageChannel); + } + + // Complete the writing side, indicating no more messages in this superstep. 
+ var messagesToProcess = messageChannel.ToList(); + messageChannel.Clear(); + + // If there are no messages to process, wait for an external event. + if (messagesToProcess.Count == 0) + { + if (!keepAlive || !await this._externalEventChannel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false)) + { + this._processCancelSource?.Cancel(); + break; + } + } + + List messageTasks = []; + foreach (var message in messagesToProcess) + { + // Check for end condition + if (message.DestinationId.Equals(EndProcessId, StringComparison.OrdinalIgnoreCase)) + { + this._processCancelSource?.Cancel(); + break; + } + + var destinationStep = this._steps.First(v => v.Id == message.DestinationId); + + // Send a message to the step + messageTasks.Add(destinationStep.HandleMessageAsync(message)); + finalStep = destinationStep; + } + + await Task.WhenAll(messageTasks).ConfigureAwait(false); + } + } + catch (Exception ex) + { + this._logger?.LogError("An error occurred while running the process: {ErrorMessage}.", ex.Message); + throw; + } + finally + { + if (this._processCancelSource?.IsCancellationRequested ?? false) + { + this._processCancelSource.Cancel(); + } + + this._processCancelSource?.Dispose(); + } + + return; + } + + /// + /// Processes external events that have been sent to the process, translates them to s, and enqueues + /// them to the provided message channel so that they can be processesed in the next superstep. + /// + /// The message channel where messages should be enqueued. + private void EnqueueExternalMessages(Queue messageChannel) + { + while (this._externalEventChannel.Reader.TryRead(out var externalEvent)) + { + if (this._outputEdges!.TryGetValue(externalEvent.Id!, out List? 
edges) && edges is not null) + { + foreach (var edge in edges) + { + LocalMessage message = LocalMessageFactory.CreateFromEdge(edge, externalEvent.Data); + messageChannel.Enqueue(message); + } + } + } + } + + /// + /// Processes events emitted by the given step in the last superstep, translates them to s, and enqueues + /// them to the provided message channel so that they can be processesed in the next superstep. + /// + /// The step containing outgoing events to process. + /// The message channel where messages should be enqueued. + private void EnqueueStepMessages(LocalStep step, Queue messageChannel) + { + var allStepEvents = step.GetAllEvents(); + foreach (var stepEvent in allStepEvents) + { + // Emit the event out of the process (this one) if it's visibility is public. + if (stepEvent.Visibility == KernelProcessEventVisibility.Public) + { + base.EmitEvent(stepEvent); + } + + // Get the edges for the event and queue up the messages to be sent to the next steps. + foreach (var edge in step.GetEdgeForEvent(stepEvent.Id!)) + { + LocalMessage message = LocalMessageFactory.CreateFromEdge(edge, stepEvent.Data); + messageChannel.Enqueue(message); + } + } + } + + /// + /// Builds a from the current . + /// + /// An instance of + /// + private async Task ToKernelProcessAsync() + { + var processState = new KernelProcessState(this.Name, this.Id); + var stepTasks = this._steps.Select(step => step.ToKernelProcessStepInfoAsync()).ToList(); + var steps = await Task.WhenAll(stepTasks).ConfigureAwait(false); + return new KernelProcess(processState, steps, this._outputEdges); + } + + /// + /// When the process is used as a step within another process, this method will be called + /// rather than ToKernelProcessAsync when extracting the state. 
+ /// + /// A where T is + internal override async Task ToKernelProcessStepInfoAsync() + { + return await this.ToKernelProcessAsync().ConfigureAwait(false); + } + + #endregion + + public void Dispose() + { + this._externalEventChannel.Writer.Complete(); + this._joinableTaskContext.Dispose(); + this._joinableTaskContext.Dispose(); + this._processCancelSource?.Dispose(); + } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalProcessFactory.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcessFactory.cs new file mode 100644 index 000000000000..289661310c35 --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalProcessFactory.cs @@ -0,0 +1,30 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel; + +/// +/// A class that can run a process locally or in-process. +/// +public static class LocalKernelProcessFactory +{ + /// + /// Starts the specified process. + /// + /// Required: The to start running. + /// Required: An instance of + /// Required: The initial event to start the process. + /// An instance of that can be used to interrogate or stop the running process. + public static async Task StartAsync(this KernelProcess process, Kernel kernel, KernelProcessEvent initialEvent) + { + Verify.NotNull(process); + Verify.NotNullOrWhiteSpace(process.State?.Name); + Verify.NotNull(kernel); + Verify.NotNull(initialEvent); + + var processContext = new LocalKernelProcessContext(process, kernel); + await processContext.StartWithEventAsync(initialEvent).ConfigureAwait(false); + return processContext; + } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs new file mode 100644 index 000000000000..b031ba7b8c31 --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/LocalStep.cs @@ -0,0 +1,412 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.Process; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a step in a process that is running in-process. +/// +internal class LocalStep : KernelProcessMessageChannel +{ + /// + /// The generic state type for a process step. + /// + private static readonly Type s_genericType = typeof(KernelProcessStep<>); + + private readonly Kernel _kernel; + private readonly Queue _outgoingEventQueue = new(); + private readonly Lazy _initializeTask; + private readonly KernelProcessStepInfo _stepInfo; + private readonly string _eventNamespace; + private readonly ILogger? _logger; + + protected KernelProcessStepState _stepState; + protected Dictionary?>? _inputs = []; + protected Dictionary?>? _initialInputs = []; + protected readonly Dictionary _functions = []; + protected readonly string? ParentProcessId; + protected readonly ILoggerFactory? LoggerFactory; + protected Dictionary> _outputEdges; + + /// + /// Represents a step in a process that is running in-process. + /// + /// An instance of + /// Required. An instance of . + /// Optional. The Id of the parent process if one exists. + /// An instance of used to create loggers. + public LocalStep(KernelProcessStepInfo stepInfo, Kernel kernel, string? parentProcessId = null, ILoggerFactory? 
loggerFactory = null) + { + // This special handling will be removed with the refactoring of KernelProcessState + if (string.IsNullOrEmpty(stepInfo.State.Id) && stepInfo is KernelProcess) + { + stepInfo = stepInfo with { State = stepInfo.State with { Id = Guid.NewGuid().ToString() } }; + } + + Verify.NotNull(stepInfo); + Verify.NotNull(kernel); + Verify.NotNull(stepInfo.State.Id); + + this.ParentProcessId = parentProcessId; + this.LoggerFactory = loggerFactory; + this._kernel = kernel; + this._stepInfo = stepInfo; + this._stepState = stepInfo.State; + this._initializeTask = new Lazy(this.InitializeStepAsync); + this._logger = this.LoggerFactory?.CreateLogger(this._stepInfo.InnerStepType) ?? new NullLogger(); + this._outputEdges = this._stepInfo.Edges.ToDictionary(kvp => kvp.Key, kvp => kvp.Value.ToList()); + this._eventNamespace = $"{this._stepInfo.State.Name}_{this._stepInfo.State.Id}"; + } + + /// + /// The name of the step. + /// + internal string Name => this._stepInfo.State.Name!; + + /// + /// The Id of the step. + /// + internal string Id => this._stepInfo.State.Id!; + + /// + /// Retrieves all events that have been emitted by this step in the previous superstep. + /// + /// An where T is + internal IEnumerable GetAllEvents() + { + var allEvents = this._outgoingEventQueue.ToArray(); + this._outgoingEventQueue.Clear(); + return allEvents; + } + + /// + /// Retrieves all edges that are associated with the provided event Id. + /// + /// The event Id of interest. + /// A where T is + internal IEnumerable GetEdgeForEvent(string eventId) + { + if (this._outputEdges is null) + { + return []; + } + + if (this._outputEdges.TryGetValue(eventId, out List? edges) && edges is not null) + { + return edges; + } + + return []; + } + + /// + /// Emits an event from the step. + /// + /// The event to emit. 
+ /// A + public override ValueTask EmitEventAsync(KernelProcessEvent processEvent) + { + this.EmitEvent(LocalEvent.FromKernelProcessEvent(processEvent, this._eventNamespace)); + return default; + } + + /// + /// Handles a that has been sent to the step. + /// + /// The message to process. + /// A + /// + internal virtual async Task HandleMessageAsync(LocalMessage message) + { + Verify.NotNull(message); + + // Lazy one-time initialization of the step before processing a message + await this._initializeTask.Value.ConfigureAwait(false); + + if (this._functions is null || this._inputs is null || this._initialInputs is null) + { + throw new KernelException("The step has not been initialized."); + } + + string messageLogParameters = string.Join(", ", message.Values.Select(kvp => $"{kvp.Key}: {kvp.Value}")); + this._logger?.LogDebug("Received message from '{SourceId}' targeting function '{FunctionName}' and parameters '{Parameters}'.", message.SourceId, message.FunctionName, messageLogParameters); + + // Add the message values to the inputs for the function + foreach (var kvp in message.Values) + { + if (this._inputs.TryGetValue(message.FunctionName, out Dictionary? functionName) && functionName != null && functionName.TryGetValue(kvp.Key, out object? parameterName) && parameterName != null) + { + this._logger?.LogWarning("Step {StepName} already has input for {FunctionName}.{Key}, it is being overwritten with a message from Step named '{SourceId}'.", this.Name, message.FunctionName, kvp.Key, message.SourceId); + } + + if (!this._inputs.TryGetValue(message.FunctionName, out Dictionary? functionParameters)) + { + this._inputs[message.FunctionName] = new(); + functionParameters = this._inputs[message.FunctionName]; + } + + functionParameters![kvp.Key] = kvp.Value; + } + + // If we're still waiting for inputs on all of our functions then don't do anything. 
+ List invocableFunctions = this._inputs.Where(i => i.Value != null && i.Value.All(v => v.Value != null)).Select(i => i.Key).ToList(); + var missingKeys = this._inputs.Where(i => i.Value is null || i.Value.Any(v => v.Value is null)); + + if (invocableFunctions.Count == 0) + { + string missingKeysLog() => string.Join(", ", missingKeys.Select(k => $"{k.Key}: {string.Join(", ", k.Value?.Where(v => v.Value == null).Select(v => v.Key) ?? [])}")); + this._logger?.LogDebug("No invocable functions, missing keys: {MissingKeys}", missingKeysLog()); + return; + } + + // A message can only target one function and should not result in a different function being invoked. + var targetFunction = invocableFunctions.FirstOrDefault((name) => name == message.FunctionName) ?? + throw new InvalidOperationException($"A message targeting function '{message.FunctionName}' has resulted in a function named '{invocableFunctions.First()}' becoming invocable. Are the function names configured correctly?"); + + this._logger?.LogDebug("Step with Id `{StepId}` received all required input for function [{TargetFunction}] and is executing.", this.Name, targetFunction); + + // Concat all the inputs and run the function + KernelArguments arguments = new(this._inputs[targetFunction]!); + if (!this._functions.TryGetValue(targetFunction, out KernelFunction? function) || function == null) + { + throw new ArgumentException($"Function {targetFunction} not found in plugin {this.Name}"); + } + + FunctionResult? invokeResult = null; + string? eventName = null; + object? eventValue = null; + + // Invoke the function, catching all exceptions that it may throw, and then post the appropriate event. 
+#pragma warning disable CA1031 // Do not catch general exception types + try + { + invokeResult = await this.InvokeFunction(function, this._kernel, arguments).ConfigureAwait(false); + eventName = $"{targetFunction}.OnResult"; + eventValue = invokeResult?.GetValue(); + } + catch (Exception ex) + { + this._logger?.LogError("Error in Step {StepName}: {ErrorMessage}", this.Name, ex.Message); + eventName = $"{targetFunction}.OnError"; + eventValue = ex.Message; + } + finally + { + await this.EmitEventAsync(new KernelProcessEvent { Id = eventName, Data = eventValue }).ConfigureAwait(false); + + // Reset the inputs for the function that was just executed + this._inputs[targetFunction] = new(this._initialInputs[targetFunction] ?? []); + } +#pragma warning restore CA1031 // Do not catch general exception types + } + + /// + /// Initializes the step with the provided step information. + /// + /// A + /// + protected virtual async ValueTask InitializeStepAsync() + { + // Instantiate an instance of the inner step object + KernelProcessStep stepInstance = (KernelProcessStep)ActivatorUtilities.CreateInstance(this._kernel.Services, this._stepInfo.InnerStepType); + var kernelPlugin = KernelPluginFactory.CreateFromObject(stepInstance, pluginName: this._stepInfo.State.Name!); + + // Load the kernel functions + foreach (KernelFunction f in kernelPlugin) + { + this._functions.Add(f.Name, f); + } + + // Initialize the input channels + this._initialInputs = this.FindInputChannels(); + this._inputs = this._initialInputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)); + + // Activate the step with user-defined state if needed + KernelProcessStepState? stateObject = null; + Type? stateType = null; + + if (TryGetSubtypeOfStatefulStep(this._stepInfo.InnerStepType, out Type? 
genericStepType) && genericStepType is not null) + { + // The step is a subclass of KernelProcessStep<>, so we need to extract the generic type argument + // and create an instance of the corresponding KernelProcessStepState<>. + var userStateType = genericStepType.GetGenericArguments()[0]; + if (userStateType is null) + { + var errorMessage = "The generic type argument for the KernelProcessStep subclass could not be determined."; + this._logger?.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + stateType = typeof(KernelProcessStepState<>).MakeGenericType(userStateType); + if (stateType is null) + { + var errorMessage = "The generic type argument for the KernelProcessStep subclass could not be determined."; + this._logger?.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + stateObject = (KernelProcessStepState?)Activator.CreateInstance(stateType, this.Name, this.Id); + } + else + { + // The step is a KernelProcessStep with no user-defined state, so we can use the base KernelProcessStepState. + stateType = typeof(KernelProcessStepState); + stateObject = new KernelProcessStepState(this.Name, this.Id); + } + + if (stateObject is null) + { + var errorMessage = "The state object for the KernelProcessStep could not be created."; + this._logger?.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + MethodInfo? 
methodInfo = this._stepInfo.InnerStepType.GetMethod(nameof(KernelProcessStep.ActivateAsync), [stateType]); + + if (methodInfo is null) + { + var errorMessage = "The ActivateAsync method for the KernelProcessStep could not be found."; + this._logger?.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + this._stepState = stateObject; + methodInfo.Invoke(stepInstance, [stateObject]); + await stepInstance.ActivateAsync(stateObject).ConfigureAwait(false); + } + + /// + /// Examines the KernelFunction for the step and creates a dictionary of input channels. + /// Some types such as KernelProcessStepContext are special and need to be injected into + /// the function parameter. Those objects are instantiated at this point. + /// + /// + /// + private Dictionary?> FindInputChannels() + { + if (this._functions is null) + { + var errorMessage = "Internal Error: The step has not been initialized."; + this._logger?.LogError("{ErrorMessage}", errorMessage); + throw new KernelException(errorMessage); + } + + Dictionary?> inputs = new(); + foreach (var kvp in this._functions) + { + inputs[kvp.Key] = new(); + foreach (var param in kvp.Value.Metadata.Parameters) + { + // Optional parameters are should not be added to the input dictionary. + if (!param.IsRequired) + { + continue; + } + + // Parameters of type KernelProcessStepContext are injected by the process + // and are instantiated here. + if (param.ParameterType == typeof(KernelProcessStepContext)) + { + inputs[kvp.Key]![param.Name] = new KernelProcessStepContext(this); + } + else + { + inputs[kvp.Key]![param.Name] = null; + } + } + } + + return inputs; + } + + /// + /// Attempts to find an instance of ']]> within the provided types hierarchy. + /// + /// The type to examine. + /// The matching type if found, otherwise null. + /// True if a match is found, false otherwise. + /// TODO: Move this to a share process utilities project. + private static bool TryGetSubtypeOfStatefulStep(Type? 
type, out Type? genericStateType) + { + while (type != null && type != typeof(object)) + { + if (type.IsGenericType && type.GetGenericTypeDefinition() == s_genericType) + { + genericStateType = type; + return true; + } + + type = type.BaseType; + } + + genericStateType = null; + return false; + } + + /// + /// Invokes the provides function with the provided kernel and arguments. + /// + /// The function to invoke. + /// The kernel to use for invocation. + /// The arguments to invoke with. + /// A containing the result of the function invocation. + private Task InvokeFunction(KernelFunction function, Kernel kernel, KernelArguments arguments) + { + return kernel.InvokeAsync(function, arguments: arguments); + } + + /// + /// Extracts the current state of the step and returns it as a . + /// + /// An instance of + internal virtual async Task ToKernelProcessStepInfoAsync() + { + // Lazy one-time initialization of the step before extracting state information. + // This allows state information to be extracted even if the step has not been activated. + await this._initializeTask.Value.ConfigureAwait(false); + + var stepInfo = new KernelProcessStepInfo(this._stepInfo.InnerStepType, this._stepState!, this._outputEdges); + return stepInfo; + } + + /// + /// Emits an event from the step. + /// + /// The event to emit. + protected void EmitEvent(LocalEvent localEvent) + { + var scopedEvent = this.ScopedEvent(localEvent); + this._outgoingEventQueue.Enqueue(scopedEvent); + } + + /// + /// Generates a scoped event for the step. + /// + /// The event. + /// A with the correctly scoped namespace. + protected LocalEvent ScopedEvent(LocalEvent localEvent) + { + Verify.NotNull(localEvent); + return localEvent with { Namespace = $"{this.Name}_{this.Id}" }; + } + + /// + /// Generates a scoped event for the step. + /// + /// The event. + /// A with the correctly scoped namespace. 
+ protected LocalEvent ScopedEvent(KernelProcessEvent processEvent) + { + Verify.NotNull(processEvent); + return LocalEvent.FromKernelProcessEvent(processEvent, $"{this.Name}_{this.Id}"); + } +} diff --git a/dotnet/src/Experimental/Process.LocalRuntime/Process.LocalRuntime.csproj b/dotnet/src/Experimental/Process.LocalRuntime/Process.LocalRuntime.csproj new file mode 100644 index 000000000000..5c89f5fb3d0b --- /dev/null +++ b/dotnet/src/Experimental/Process.LocalRuntime/Process.LocalRuntime.csproj @@ -0,0 +1,36 @@ +๏ปฟ + + + + Microsoft.SemanticKernel.Process.LocalRuntime + Microsoft.SemanticKernel.Process + net8.0;netstandard2.0 + false + alpha + + + + + + + + Semantic Kernel Process - LocalRuntime + Semantic Kernel Process LocalRuntime. This package is automatically installed by Semantic Kernel Process packages if needed. + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dotnet/src/Experimental/Process.UnitTests/KernelProcessStateTests.cs b/dotnet/src/Experimental/Process.UnitTests/KernelProcessStateTests.cs new file mode 100644 index 000000000000..345c420ec923 --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/KernelProcessStateTests.cs @@ -0,0 +1,59 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Xunit; + +namespace Microsoft.SemanticKernel.UnitTests; + +/// +/// Unit testing of . +/// +public class KernelProcessStateTests +{ + /// + /// Verify initialization of . + /// + [Fact] + public void KernelProcessStateInitializationSetsPropertiesCorrectly() + { + // Arrange + string name = "TestProcess"; + string id = "123"; + + // Act + var state = new KernelProcessState(name, id); + + // Assert + Assert.Equal(name, state.Name); + Assert.Equal(id, state.Id); + } + + /// + /// Verify initialization of with null id. 
+ /// + [Fact] + public void KernelProcessStateInitializationWithNullIdSucceeds() + { + // Arrange + string name = "TestProcess"; + + // Act + var state = new KernelProcessState(name); + + // Assert + Assert.Equal(name, state.Name); + Assert.Null(state.Id); + } + + /// + /// Verify initialization of with null name throws. + /// + [Fact] + public void KernelProcessStateInitializationWithNullNameThrows() + { + // Act & Assert +#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type. + var ex = Assert.Throws(() => new KernelProcessState(name: null)); +#pragma warning restore CS8625 // Cannot convert null literal to non-nullable reference type. + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/LocalProcessTests.cs b/dotnet/src/Experimental/Process.UnitTests/LocalProcessTests.cs new file mode 100644 index 000000000000..b6070cca44c1 --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/LocalProcessTests.cs @@ -0,0 +1,109 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Threading.Tasks; +using Xunit; + +namespace Microsoft.SemanticKernel.Process.UnitTests; + +/// +/// Unit tests for the class. +/// +public class LocalProcessTests +{ + /// + /// Validates that the constructor initializes the steps correctly. 
+ /// + [Fact] + public async Task ExecuteAsyncInitializesCorrectlyAsync() + { + // Arrange + var processState = new KernelProcessState(name: "TestProcess", id: "123"); + var mockKernelProcess = new KernelProcess(processState, + [ + new(typeof(TestStep), new KernelProcessState(name: "Step1", id: "1"), []), + new(typeof(TestStep), new KernelProcessState(name: "Step2", id: "2"), []) + ], []); + + var mockKernel = new Kernel(); + using var localProcess = new LocalProcess(mockKernelProcess, mockKernel, loggerFactory: null); + + // Act + await localProcess.StartAsync(); + + // Assert + Assert.Equal(2, localProcess._steps.Count); + Assert.Contains(localProcess._steps, s => s.Name == "Step1"); + Assert.Contains(localProcess._steps, s => s.Name == "Step2"); + } + + /// + /// Validates that the assigns and Id to the process if one is not already set. + /// + [Fact] + public void ProcessWithMissingIdIsAssignedAnId() + { + // Arrange + var mockKernel = new Kernel(); + var processState = new KernelProcessState(name: "TestProcess"); + var mockKernelProcess = new KernelProcess(processState, + [ + new(typeof(TestStep), new KernelProcessState(name: "Step1", id: "1"), []), + new(typeof(TestStep), new KernelProcessState(name: "Step2", id: "2"), []) + ], []); + + // Act + using var localProcess = new LocalProcess(mockKernelProcess, mockKernel, loggerFactory: null); + + // Assert + Assert.NotEmpty(localProcess.Id); + } + + /// + /// Validates that the assigns and Id to the process if one is not already set. 
+ /// + [Fact] + public void ProcessWithAssignedIdIsNotOverwrittenId() + { + // Arrange + var mockKernel = new Kernel(); + var processState = new KernelProcessState(name: "TestProcess", id: "AlreadySet"); + var mockKernelProcess = new KernelProcess(processState, + [ + new(typeof(TestStep), new KernelProcessState(name: "Step1", id: "1"), []), + new(typeof(TestStep), new KernelProcessState(name: "Step2", id: "2"), []) + ], []); + + // Act + using var localProcess = new LocalProcess(mockKernelProcess, mockKernel, loggerFactory: null); + + // Assert + Assert.NotEmpty(localProcess.Id); + Assert.Equal("AlreadySet", localProcess.Id); + } + + /// + /// A class that represents a step for testing. + /// + private sealed class TestStep : KernelProcessStep + { + /// + /// The name of the step. + /// + public static string Name => "TestStep"; + + /// + /// A method that represents a function for testing. + /// + [KernelFunction] + public void TestFunction() + { + } + } + + /// + /// A class that represents a state for testing. 
+ /// + private sealed class TestState + { + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/Process.UnitTests.csproj b/dotnet/src/Experimental/Process.UnitTests/Process.UnitTests.csproj new file mode 100644 index 000000000000..23191103b49c --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/Process.UnitTests.csproj @@ -0,0 +1,39 @@ +๏ปฟ + + + SemanticKernel.Process.UnitTests + SemanticKernel.Process.UnitTests + net8.0 + + LatestMajor + true + false + 12 + + $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0080,SKEXP0110;OPENAI001 + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + diff --git a/dotnet/src/Experimental/Process.UnitTests/ProcessBuilderTests.cs b/dotnet/src/Experimental/Process.UnitTests/ProcessBuilderTests.cs new file mode 100644 index 000000000000..de3a9df15159 --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/ProcessBuilderTests.cs @@ -0,0 +1,127 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Xunit; + +namespace Microsoft.SemanticKernel.UnitTests; + +/// +/// Unit tests for the ProcessBuilder class. +/// +public class ProcessBuilderTests +{ + private const string ProcessName = "TestProcess"; + private const string StepName = "TestStep"; + private const string EventId = "TestEvent"; + private const string SubProcessName = "SubProcess"; + + /// + /// Tests the initialization of the ProcessBuilder. + /// + [Fact] + public void ProcessBuilderInitialization() + { + // Arrange & Act + var processBuilder = new ProcessBuilder(ProcessName); + + // Assert + Assert.Equal(ProcessName, processBuilder.Name); + Assert.Empty(processBuilder.Steps); + } + + /// + /// Tests the AddStepFromType method to ensure it adds a step correctly. 
+ /// + [Fact] + public void AddStepFromTypeAddsStep() + { + // Arrange + var processBuilder = new ProcessBuilder(ProcessName); + + // Act + var stepBuilder = processBuilder.AddStepFromType(StepName); + + // Assert + Assert.Single(processBuilder.Steps); + Assert.Equal(StepName, stepBuilder.Name); + } + + /// + /// Tests the AddStepFromProcess method to ensure it adds a sub-process correctly. + /// + [Fact] + public void AddStepFromProcessAddsSubProcess() + { + // Arrange + var processBuilder = new ProcessBuilder(ProcessName); + var subProcessBuilder = new ProcessBuilder(SubProcessName); + + // Act + var stepBuilder = processBuilder.AddStepFromProcess(subProcessBuilder); + + // Assert + Assert.Single(processBuilder.Steps); + Assert.Equal(SubProcessName, stepBuilder.Name); + } + + /// + /// Tests the OnExternalEvent method to ensure it creates an edge builder correctly. + /// + [Fact] + public void OnExternalEventCreatesEdgeBuilder() + { + // Arrange + var processBuilder = new ProcessBuilder(ProcessName); + + // Act + var edgeBuilder = processBuilder.OnInputEvent(EventId); + + // Assert + Assert.NotNull(edgeBuilder); + Assert.Equal(EventId, edgeBuilder.EventId); + } + + /// + /// Tests the Build method to ensure it creates a KernelProcess correctly. + /// + [Fact] + public void BuildCreatesKernelProcess() + { + // Arrange + var processBuilder = new ProcessBuilder(ProcessName); + processBuilder.AddStepFromType(StepName); + + // Act + var kernelProcess = processBuilder.Build(); + + // Assert + Assert.NotNull(kernelProcess); + Assert.Equal(ProcessName, kernelProcess.State.Name); + Assert.Single(kernelProcess.Steps); + } + + /// + /// A class that represents a step for testing. + /// + private sealed class TestStep : KernelProcessStep + { + /// + /// The name of the step. + /// + public static string Name => "TestStep"; + + /// + /// A method that represents a function for testing. 
+ /// + [KernelFunction] + public void TestFunction() + { + } + } + + /// + /// A class that represents a state for testing. + /// + private sealed class TestState + { + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/ProcessEdgeBuilderTests.cs b/dotnet/src/Experimental/Process.UnitTests/ProcessEdgeBuilderTests.cs new file mode 100644 index 000000000000..47c81817a596 --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/ProcessEdgeBuilderTests.cs @@ -0,0 +1,27 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Xunit; + +namespace Microsoft.SemanticKernel.UnitTests; + +/// +/// Unit testing of . +/// +public class ProcessEdgeBuilderTests +{ + /// + /// Verify initialization of . + /// + [Fact] + public void ProcessEdgeBuilderInitialization() + { + // Arrange + var processBuilder = new ProcessBuilder("TestProcess"); + + // Act + var edgeBuilder = new ProcessEdgeBuilder(processBuilder, "TestEvent"); + + // Assert + Assert.NotNull(edgeBuilder); + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/ProcessStepBuilderTests.cs b/dotnet/src/Experimental/Process.UnitTests/ProcessStepBuilderTests.cs new file mode 100644 index 000000000000..704a145536f6 --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/ProcessStepBuilderTests.cs @@ -0,0 +1,226 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using Xunit; + +namespace Microsoft.SemanticKernel.Tests; + +/// +/// Unit tests for the class. +/// +public class ProcessStepBuilderTests +{ + /// + /// Verify the constructor initializes properties. 
+ /// + [Fact] + public void ConstructorShouldInitializeProperties() + { + // Arrange + var name = "TestStep"; + + // Act + var stepBuilder = new TestProcessStepBuilder(name); + + // Assert + Assert.Equal(name, stepBuilder.Name); + Assert.NotNull(stepBuilder.Id); + Assert.NotNull(stepBuilder.FunctionsDict); + Assert.NotNull(stepBuilder.Edges); + } + + /// + /// Verify that the method returns a . + /// + [Fact] + public void OnEventShouldReturnProcessStepEdgeBuilder() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + + // Act + var edgeBuilder = stepBuilder.OnEvent("TestEvent"); + + // Assert + Assert.NotNull(edgeBuilder); + } + + /// + /// Verify that the method returns a . + /// + [Fact] + public void OnFunctionResultShouldReturnProcessStepEdgeBuilder() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + + // Act + var edgeBuilder = stepBuilder.OnFunctionResult("TestFunction"); + + // Assert + Assert.NotNull(edgeBuilder); + } + + /// + /// Verify that the method adds an edge. + /// + [Fact] + public void LinkToShouldAddEdge() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + var edgeBuilder = new ProcessStepEdgeBuilder(stepBuilder, "TestEvent"); + + // Act + stepBuilder.LinkTo("TestEvent", edgeBuilder); + + // Assert + Assert.True(stepBuilder.Edges.ContainsKey("TestEvent")); + Assert.Contains(edgeBuilder, stepBuilder.Edges["TestEvent"]); + } + + /// + /// Verify that the method throws an exception when no functions exist. + /// + [Fact] + public void ResolveFunctionTargetShouldThrowExceptionWhenNoFunctionsExist() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + + // Act & Assert + Assert.Throws(() => stepBuilder.ResolveFunctionTarget(null, null)); + } + + /// + /// Verify that the method correctly resolves a function target. + /// In this case, the function name is provided and the parameter name is not. The target function has no parameters. 
+ /// + [Fact] + public void ResolveFunctionTargetWithoutParameterShouldReturnFunctionTargetWhenNoneExist() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + stepBuilder.FunctionsDict["TestFunction"] = new KernelFunctionMetadata(name: "TestFunction") + { + Description = "Test function description", + Parameters = new List() + }; + + // Act + var target = stepBuilder.ResolveFunctionTarget("TestFunction", null); + + // Assert + Assert.NotNull(target); + Assert.Equal("TestFunction", target.FunctionName); + } + + /// + /// Verify that the method correctly resolves a function target. + /// In this case, the function name is provided and the parameter name is not. The target function has one parameters. + /// + [Fact] + public void ResolveFunctionTargetWithoutParameterShouldReturnFunctionTargetWhenOnlyOneParameterExists() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + stepBuilder.FunctionsDict["TestFunction"] = new KernelFunctionMetadata(name: "TestFunction") + { + Description = "Test function description", + Parameters = [new KernelParameterMetadata("param1")] + }; + + // Act + var target = stepBuilder.ResolveFunctionTarget("TestFunction", null); + + // Assert + Assert.NotNull(target); + Assert.Equal("TestFunction", target.FunctionName); + Assert.Equal("param1", target.ParameterName); + } + + /// + /// Verify that the method throws when it cannot resolve. + /// In this case, the function name is provided and the parameter name is not. The target function has more than one parameters. 
+ /// + [Fact] + public void ResolveFunctionTargetWithoutParameterShouldThrowWhenCannotResolveParameter() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + stepBuilder.FunctionsDict["TestFunction"] = new KernelFunctionMetadata(name: "TestFunction") + { + Description = "Test function description", + Parameters = [new KernelParameterMetadata("param1"), new KernelParameterMetadata("param2")] + }; + + // Act & Assert + Assert.Throws(() => stepBuilder.ResolveFunctionTarget("TestFunction", null)); + } + + /// + /// Verify that the method correctly resolves a function target. + /// In this case, the function name is not provided, nor is the parameter name. The target function has one function with one parameter. + /// + [Fact] + public void ResolveFunctionTargetWithoutParameterShouldReturnFunctionTargetWhenOnlyOneFunctionExists() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + stepBuilder.FunctionsDict["TestFunction"] = new KernelFunctionMetadata(name: "TestFunction") + { + Description = "Test function description", + Parameters = [new KernelParameterMetadata("param1")] + }; + + // Act + var target = stepBuilder.ResolveFunctionTarget(null, null); + + // Assert + Assert.NotNull(target); + Assert.Equal("TestFunction", target.FunctionName); + Assert.Equal("param1", target.ParameterName); + } + + /// + /// Verify that the method throws when it cannot resolve. + /// In this case, the function name is provided as is the parameter name. The target has more than one function. 
+ /// + [Fact] + public void ResolveFunctionTargetWithoutParameterShouldThrowWhenCannotResolveFunction() + { + // Arrange + var stepBuilder = new TestProcessStepBuilder("TestStep"); + stepBuilder.FunctionsDict["TestFunction1"] = new KernelFunctionMetadata(name: "TestFunction1") + { + Description = "Test function description", + Parameters = [new KernelParameterMetadata("param1")] + }; + stepBuilder.FunctionsDict["TestFunction2"] = new KernelFunctionMetadata(name: "TestFunction2") + { + Description = "Test function description", + Parameters = [new KernelParameterMetadata("param1")] + }; + + // Act & Assert + Assert.Throws(() => stepBuilder.ResolveFunctionTarget(null, null)); + } + + /// + /// A test implementation of for testing purposes. + /// + private sealed class TestProcessStepBuilder : ProcessStepBuilder + { + public TestProcessStepBuilder(string name) : base(name) { } + + internal override KernelProcessStepInfo BuildStep() + { + return new KernelProcessStepInfo(typeof(TestProcessStepBuilder), new KernelProcessStepState(this.Name, this.Id), []); + } + + internal override Dictionary GetFunctionMetadataMap() + { + return new Dictionary(); + } + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/ProcessStepEdgeBuilderTests.cs b/dotnet/src/Experimental/Process.UnitTests/ProcessStepEdgeBuilderTests.cs new file mode 100644 index 000000000000..383215bc6d0c --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/ProcessStepEdgeBuilderTests.cs @@ -0,0 +1,153 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Xunit; + +namespace Microsoft.SemanticKernel.Tests; + +/// +/// Unit tests for the class. +/// +public class ProcessStepEdgeBuilderTests +{ + /// + /// Verify the constructor initializes properties. 
+ /// + [Fact] + public void ConstructorShouldInitializeProperties() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + + // Act + var builder = new ProcessStepEdgeBuilder(source, eventType); + + // Assert + Assert.Equal(source, builder.Source); + Assert.Equal(eventType, builder.EventId); + } + + /// + /// Verify that the method sets the output target. + /// + [Fact] + public void SendEventToShouldSetOutputTarget() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + var builder = new ProcessStepEdgeBuilder(source, eventType); + var outputTarget = new ProcessFunctionTargetBuilder(source); + + // Act + builder.SendEventTo(outputTarget); + + // Assert + Assert.Equal(outputTarget, builder.Target); // Assuming GetOutputTarget() is a method to access _outputTarget + } + + /// + /// Verify that the method throws if the output target is already set. + /// + [Fact] + public void SendEventToShouldThrowIfOutputTargetAlreadySet() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + var builder = new ProcessStepEdgeBuilder(source, eventType); + var outputTarget1 = new ProcessFunctionTargetBuilder(source); + var outputTarget2 = new ProcessFunctionTargetBuilder(source); + + // Act + builder.SendEventTo(outputTarget1); + + // Assert + Assert.Throws(() => builder.SendEventTo(outputTarget2)); + } + + /// + /// Verify that the method sets the output target to the end step. + /// + [Fact] + public void StopProcessShouldSetOutputTargetToEndStep() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + var builder = new ProcessStepEdgeBuilder(source, eventType); + + // Act + builder.StopProcess(); + + // Assert + Assert.Equal(EndStep.Instance, builder.Target?.Step); + } + + /// + /// Verify that the method throws if the output target is already set. 
+ /// + [Fact] + public void StopProcessShouldThrowIfOutputTargetAlreadySet() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + var builder = new ProcessStepEdgeBuilder(source, eventType); + var outputTarget = new ProcessFunctionTargetBuilder(source); + + // Act + builder.SendEventTo(outputTarget); + + // Assert + Assert.Throws(() => builder.StopProcess()); + } + + /// + /// Verify that the method returns a . + /// + [Fact] + public void BuildShouldReturnKernelProcessEdge() + { + // Arrange + var source = new ProcessStepBuilder(TestStep.Name); + var eventType = "Event1"; + var builder = new ProcessStepEdgeBuilder(source, eventType); + var outputTarget = new ProcessFunctionTargetBuilder(source); + builder.SendEventTo(outputTarget); + + // Act + var edge = builder.Build(); + + // Assert + Assert.NotNull(edge); + Assert.Equal(source.Id, edge.SourceStepId); + } + + /// + /// A class that represents a step for testing. + /// + private sealed class TestStep : KernelProcessStep + { + /// + /// The name of the step. + /// + public static string Name => "TestStep"; + + /// + /// A method that represents a function for testing. + /// + [KernelFunction] + public void TestFunction() + { + } + } + + /// + /// A class that represents a state for testing. + /// + private sealed class TestState + { + } +} diff --git a/dotnet/src/Experimental/Process.UnitTests/ProcessTypeExtensionsTests.cs b/dotnet/src/Experimental/Process.UnitTests/ProcessTypeExtensionsTests.cs new file mode 100644 index 000000000000..3c81087f4eeb --- /dev/null +++ b/dotnet/src/Experimental/Process.UnitTests/ProcessTypeExtensionsTests.cs @@ -0,0 +1,106 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.SemanticKernel.Process; +using Xunit; + +namespace Microsoft.SemanticKernel.Tests; + +/// +/// Unit tests for the class. 
+/// +public class ProcessTypeExtensionsTests +{ + private sealed class TestState { } + private class TestStep : KernelProcessStep { } + private sealed class DerivedTestStep : TestStep { } + private sealed class NonStep { } + private sealed class NonGenericStep : KernelProcessStep { } + + /// + /// Verify that TryGetSubtypeOfStatefulStep returns true and the correct type when the type is a direct subtype of KernelProcessStep. + /// + [Fact] + public void TryGetSubtypeOfStatefulStepDirectSubtypeReturnsTrue() + { + // Arrange + Type type = typeof(TestStep); + + // Act + bool result = type.TryGetSubtypeOfStatefulStep(out Type? genericStateType); + + // Assert + Assert.True(result); + Assert.NotNull(genericStateType); + Assert.Equal(typeof(KernelProcessStep), genericStateType); + } + + /// + /// Verify that TryGetSubtypeOfStatefulStep returns true and the correct type when the type is a subtype of a subtype of KernelProcessStep. + /// + [Fact] + public void TryGetSubtypeOfStatefulStepInheritedSubtypeReturnsTrue() + { + // Arrange + Type type = typeof(DerivedTestStep); + + // Act + bool result = type.TryGetSubtypeOfStatefulStep(out Type? genericStateType); + + // Assert + Assert.True(result); + Assert.NotNull(genericStateType); + Assert.Equal(typeof(KernelProcessStep), genericStateType); + } + + /// + /// Verify that TryGetSubtypeOfStatefulStep returns false when the type is not a subtype of KernelProcessStep. + /// + [Fact] + public void TryGetSubtypeOfStatefulStepNotASubtypeReturnsFalse() + { + // Arrange + Type type = typeof(NonStep); + + // Act + bool result = type.TryGetSubtypeOfStatefulStep(out Type? genericStateType); + + // Assert + Assert.False(result); + Assert.Null(genericStateType); + } + + /// + /// Verify that TryGetSubtypeOfStatefulStep returns false when the type is not a subtype of KernelProcessStep. 
+ /// + [Fact] + public void TryGetSubtypeOfStatefulStepNotAGenericSubtypeReturnsFalse() + { + // Arrange + Type type = typeof(NonGenericStep); + + // Act + bool result = type.TryGetSubtypeOfStatefulStep(out Type? genericStateType); + + // Assert + Assert.False(result); + Assert.Null(genericStateType); + } + + /// + /// Verify that TryGetSubtypeOfStatefulStep returns false when the type is null. + /// + [Fact] + public void TryGetSubtypeOfStatefulStepNullTypeReturnsFalse() + { + // Arrange + Type? type = null; + + // Act + bool result = type.TryGetSubtypeOfStatefulStep(out Type? genericStateType); + + // Assert + Assert.False(result); + Assert.Null(genericStateType); + } +} diff --git a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs index 3f0822dd01db..6a36cd8f5073 100644 --- a/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs +++ b/dotnet/src/Extensions/Extensions.UnitTests/PromptTemplates/Handlebars/Helpers/KernelFunctionHelpersTests.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Collections.Generic; using System.ComponentModel; using System.Globalization; using System.Threading.Tasks; @@ -90,6 +91,21 @@ public async Task ItRendersFunctionHelpersWitHashArgumentsAsync() Assert.Equal("BazBar", result); } + [Fact] + public async Task ItRendersFunctionHelpersWitHashArgumentsAndInputVariableAsync() + { + // Arrange and Act + const string VarName = "param_x"; + var template = """{{Foo-StringifyInt (""" + VarName + """)}}"""; + var inputVariables = new List { new() { Name = VarName } }; + var arguments = new KernelArguments { [VarName] = 5 }; + + var result = await this.RenderPromptTemplateAsync(template, inputVariables, arguments); + + // Assert + Assert.Equal("5", result); + } + [Fact] public async Task ShouldThrowExceptionWhenMissingRequiredParameterAsync() { @@ -123,6 +139,34 @@ public async Task ShouldThrowExceptionWhenFunctionHelperHasInvalidParameterTypeA Assert.Contains("Invalid argument type", exception.Message, StringComparison.CurrentCultureIgnoreCase); } + [Fact] + public async Task ShouldThrowExceptionWhenFunctionHasNullPositionalParameterAsync() + { + // Arrange and Act + var template = """{{Foo-StringifyInt (nullParameter)}}"""; + var inputVariables = new List { new() { Name = "nullParameter" } }; + var arguments = new KernelArguments { ["nullParameter"] = null }; + + // Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template, inputVariables, arguments)); + Assert.Contains("Invalid parameter type for function", exception.Message, StringComparison.CurrentCultureIgnoreCase); + Assert.Contains("", exception.Message, StringComparison.CurrentCultureIgnoreCase); + } + + [Fact] + public async Task ShouldThrowExceptionWhenFunctionHasNullHashParameterAsync() + { + // Arrange and Act + var template = """{{Foo-StringifyInt x=(nullParameter)}}"""; + var inputVariables = new List { new() { Name = "nullParameter" } }; + var arguments = new KernelArguments { ["nullParameter"] = 
null }; + + // Assert + var exception = await Assert.ThrowsAsync(() => this.RenderPromptTemplateAsync(template, inputVariables, arguments)); + Assert.Contains("Invalid argument type for function", exception.Message, StringComparison.CurrentCultureIgnoreCase); + Assert.Contains("", exception.Message, StringComparison.CurrentCultureIgnoreCase); + } + [Fact] public async Task ShouldThrowExceptionWhenFunctionHelperIsNotDefinedAsync() { @@ -176,15 +220,20 @@ public async Task ItCanReturnCustomReturnTypeAsync() private readonly Kernel _kernel; private readonly KernelArguments _arguments; - private async Task RenderPromptTemplateAsync(string template) + private async Task RenderPromptTemplateAsync(string template, List? inputVariables = null, KernelArguments? arguments = null) { // Arrange this._kernel.ImportPluginFromObject(new Foo()); var resultConfig = InitializeHbPromptConfig(template); + if (inputVariables != null) + { + resultConfig.InputVariables = inputVariables; + } + var target = (HandlebarsPromptTemplate)this._factory.Create(resultConfig); // Act - var result = await target.RenderAsync(this._kernel, this._arguments); + var result = await target.RenderAsync(this._kernel, arguments ?? this._arguments); return result; } diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs index 9cb98b446e68..8dbca53a799c 100644 --- a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs @@ -101,8 +101,13 @@ private static void RegisterFunctionAsHelper( /// /// Function parameter metadata. /// Handlebar argument. 
- private static bool IsExpectedParameterType(KernelParameterMetadata parameterMetadata, object argument) + private static bool IsExpectedParameterType(KernelParameterMetadata parameterMetadata, object? argument) { + if (argument == null) + { + return false; + } + var actualParameterType = parameterMetadata.ParameterType is Type parameterType && Nullable.GetUnderlyingType(parameterType) is Type underlyingType ? underlyingType : parameterMetadata.ParameterType; @@ -140,13 +145,13 @@ private static void ProcessHashArguments( if (handlebarsArguments is not null && (handlebarsArguments.TryGetValue(fullyQualifiedParamName, out var value) || handlebarsArguments.TryGetValue(param.Name, out value))) { value = KernelHelpersUtils.GetArgumentValue(value, executionContext); - if (value is not null && IsExpectedParameterType(param, value)) + if (IsExpectedParameterType(param, value)) { executionContext[param.Name] = value; } else { - throw new KernelException($"Invalid argument type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {value?.GetType()}."); + throw new KernelException($"Invalid argument type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {value?.GetType().ToString() ?? ""}."); } } else if (param.IsRequired) @@ -180,7 +185,7 @@ private static void ProcessPositionalArguments(KernelFunctionMetadata functionMe } else { - throw new KernelException($"Invalid parameter type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {arg.GetType()}."); + throw new KernelException($"Invalid parameter type for function {functionMetadata.Name}. Parameter {param.Name} expects type {param.ParameterType ?? (object?)param.Schema} but received {arg?.GetType().ToString() ?? 
""}."); } } } diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj index aa6f9eb848c8..d5e3b2fc9e4b 100644 --- a/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj +++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/PromptTemplates.Handlebars.csproj @@ -9,6 +9,10 @@ true + + rc + + diff --git a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs index 1009968fa0e7..7716eb7cac93 100644 --- a/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.OpenApi.Extensions/Extensions/ApiManifestKernelExtensions.cs @@ -128,8 +128,6 @@ public static async Task CreatePluginFromApiManifestAsync( var predicate = OpenApiFilterService.CreatePredicate(null, null, requestUrls, openApiDocument); var filteredOpenApiDocument = OpenApiFilterService.CreateFilteredDocument(openApiDocument, predicate); - var serverUrl = filteredOpenApiDocument.Servers.FirstOrDefault()?.Url; - var openApiFunctionExecutionParameters = pluginParameters?.FunctionExecutionParameters?.ContainsKey(apiName) == true ? pluginParameters.FunctionExecutionParameters[apiName] : null; @@ -145,17 +143,18 @@ public static async Task CreatePluginFromApiManifestAsync( openApiFunctionExecutionParameters?.EnableDynamicPayload ?? true, openApiFunctionExecutionParameters?.EnablePayloadNamespacing ?? 
false); - if (serverUrl is not null) + var server = filteredOpenApiDocument.Servers.FirstOrDefault(); + if (server?.Url is not null) { foreach (var path in filteredOpenApiDocument.Paths) { - var operations = OpenApiDocumentParser.CreateRestApiOperations(serverUrl, path.Key, path.Value, null, logger); + var operations = OpenApiDocumentParser.CreateRestApiOperations(server, path.Key, path.Value, null, logger); foreach (RestApiOperation operation in operations) { try { logger.LogTrace("Registering Rest function {0}.{1}", pluginName, operation.Id); - functions.Add(OpenApiKernelPluginFactory.CreateRestApiFunction(pluginName, runner, operation, openApiFunctionExecutionParameters, new Uri(serverUrl), loggerFactory)); + functions.Add(OpenApiKernelPluginFactory.CreateRestApiFunction(pluginName, runner, operation, openApiFunctionExecutionParameters, new Uri(server.Url), loggerFactory)); } catch (Exception ex) when (!ex.IsCriticalException()) { diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs index af65b1c59825..77298a9c86af 100644 --- a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs @@ -50,9 +50,9 @@ public sealed class RestApiOperation public HttpMethod Method { get; } /// - /// The server URL. + /// The server. /// - public Uri? ServerUrl { get; } + public RestApiOperationServer Server { get; } /// /// The operation parameters. @@ -78,7 +78,7 @@ public sealed class RestApiOperation /// Creates an instance of a class. /// /// The operation identifier. - /// The server URL. + /// The server. /// The operation path. /// The operation method. /// The operation description. @@ -87,7 +87,7 @@ public sealed class RestApiOperation /// The operation responses. public RestApiOperation( string id, - Uri? 
serverUrl, + RestApiOperationServer server, string path, HttpMethod method, string description, @@ -96,7 +96,7 @@ public RestApiOperation( IDictionary? responses = null) { this.Id = id; - this.ServerUrl = serverUrl; + this.Server = server; this.Path = path; this.Method = method; this.Description = description; @@ -114,7 +114,7 @@ public RestApiOperation( /// The operation Url. public Uri BuildOperationUrl(IDictionary arguments, Uri? serverUrlOverride = null, Uri? apiHostUrl = null) { - var serverUrl = this.GetServerUrl(serverUrlOverride, apiHostUrl); + var serverUrl = this.GetServerUrl(serverUrlOverride, apiHostUrl, arguments); var path = this.BuildPath(this.Path, arguments); @@ -250,8 +250,9 @@ private string BuildPath(string pathTemplate, IDictionary argum /// /// Override for REST API operation server url. /// The URL of REST API host. + /// The operation arguments. /// The operation server url. - private Uri GetServerUrl(Uri? serverUrlOverride, Uri? apiHostUrl) + private Uri GetServerUrl(Uri? serverUrlOverride, Uri? apiHostUrl, IDictionary arguments) { string serverUrlString; @@ -259,10 +260,30 @@ private Uri GetServerUrl(Uri? serverUrlOverride, Uri? apiHostUrl) { serverUrlString = serverUrlOverride.AbsoluteUri; } + else if (this.Server.Url is not null) + { + serverUrlString = this.Server.Url; + foreach (var variable in this.Server.Variables) + { + arguments.TryGetValue(variable.Key, out object? value); + string? strValue = value as string; + if (strValue is not null && variable.Value.IsValid(strValue)) + { + serverUrlString = serverUrlString.Replace($"{{{variable.Key}}}", strValue); + } + else if (variable.Value.Default is not null) + { + serverUrlString = serverUrlString.Replace($"{{{variable.Key}}}", variable.Value.Default); + } + else + { + throw new KernelException($"No value provided for the '{variable.Key}' server variable of the operation - '{this.Id}'."); + } + } + } else { serverUrlString = - this.ServerUrl?.AbsoluteUri ?? apiHostUrl?.AbsoluteUri ?? 
throw new InvalidOperationException($"Server url is not defined for operation {this.Id}"); } diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServer.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServer.cs new file mode 100644 index 000000000000..0936bdcdfb45 --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServer.cs @@ -0,0 +1,38 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// REST API Operation Server. +/// +public sealed class RestApiOperationServer +{ + /// + /// A URL to the target host. This URL supports Server Variables and MAY be relative, + /// to indicate that the host location is relative to the location where the OpenAPI document is being served. + /// Variable substitutions will be made when a variable is named in {brackets}. + /// +#pragma warning disable CA1056 // URI-like properties should not be strings + public string? Url { get; } +#pragma warning restore CA1056 // URI-like properties should not be strings + + /// + /// A map between a variable name and its value. The value is used for substitution in the server's URL template. + /// + public IDictionary Variables { get; } + + /// + /// Construct a new object. + /// + /// URL to the target host + /// Substitution variables for the server's URL template +#pragma warning disable CA1054 // URI-like parameters should not be strings + public RestApiOperationServer(string? url = null, IDictionary? variables = null) +#pragma warning restore CA1054 // URI-like parameters should not be strings + { + this.Url = string.IsNullOrEmpty(url) ? null : url; + this.Variables = variables ?? 
new Dictionary(); + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServerVariable.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServerVariable.cs new file mode 100644 index 000000000000..eba41dca754f --- /dev/null +++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationServerVariable.cs @@ -0,0 +1,49 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; + +namespace Microsoft.SemanticKernel.Plugins.OpenApi; + +/// +/// REST API Operation Server Variable. +/// +public sealed class RestApiOperationServerVariable +{ + /// + /// An optional description for the server variable. CommonMark syntax MAY be used for rich text representation. + /// + public string? Description { get; } + + /// + /// REQUIRED. The default value to use for substitution, and to send, if an alternate value is not supplied. + /// Unlike the Schema Object's default, this value MUST be provided by the consumer. + /// + public string Default { get; } + + /// + /// An enumeration of string values to be used if the substitution options are from a limited set. + /// + public List? Enum { get; } + + /// + /// Construct a new object. + /// + /// The default value to use for substitution. + /// An optional description for the server variable. + /// An enumeration of string values to be used if the substitution options are from a limited set. + public RestApiOperationServerVariable(string defaultValue, string? description = null, List? enumValues = null) + { + this.Default = defaultValue; + this.Description = description; + this.Enum = enumValues; + } + + /// + /// Return true if the value is valid based on the enumeration of string values to be used. + /// + /// Value to be used as a substitution. + public bool IsValid(string? value) + { + return this.Enum?.Contains(value!) ?? 
true; + } +} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs deleted file mode 100644 index c5890d604d81..000000000000 --- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs +++ /dev/null @@ -1,257 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.IO; -using System.Net.Http; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.SemanticKernel.Http; - -namespace Microsoft.SemanticKernel.Plugins.OpenApi; - -/// -/// Provides extension methods for importing plugins exposed through OpenAI's ChatGPT format. -/// -[Obsolete("This class is deprecated and will be removed in a future version.")] -public static class OpenAIPluginKernelExtensions -{ - private static readonly JsonSerializerOptions s_jsonOptionsCache = - new() - { - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, - }; - - // TODO: Review XML comments - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// The file path to the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task ImportPluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - string filePath, - OpenAIFunctionExecutionParameters? 
executionParameters = null, - CancellationToken cancellationToken = default) - { - KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, filePath, executionParameters, cancellationToken).ConfigureAwait(false); - kernel.Plugins.Add(plugin); - return plugin; - } - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// A local or remote URI referencing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task ImportPluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - Uri uri, - OpenAIFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, uri, executionParameters, cancellationToken).ConfigureAwait(false); - kernel.Plugins.Add(plugin); - return plugin; - } - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format and imports it into the 's plugin collection. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// A stream representing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task ImportPluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - Stream stream, - OpenAIFunctionExecutionParameters? 
executionParameters = null, - CancellationToken cancellationToken = default) - { - KernelPlugin plugin = await kernel.CreatePluginFromOpenAIAsync(pluginName, stream, executionParameters, cancellationToken).ConfigureAwait(false); - kernel.Plugins.Add(plugin); - return plugin; - } - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// The file path to the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task CreatePluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - string filePath, - OpenAIFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); - - var openAIManifest = await DocumentLoader.LoadDocumentFromFilePathAsync( - filePath, - kernel.LoggerFactory.CreateLogger(typeof(OpenAIPluginKernelExtensions)) ?? NullLogger.Instance, - cancellationToken).ConfigureAwait(false); - - return await CreateAsync( - kernel, - openAIManifest, - pluginName, - executionParameters, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// A local or remote URI referencing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task CreatePluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - Uri uri, - OpenAIFunctionExecutionParameters? 
executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); - -#pragma warning disable CA2000 // Dispose objects before losing scope. No need to dispose the Http client here. It can either be an internal client using NonDisposableHttpClientHandler or an external client managed by the calling code, which should handle its disposal. - var httpClient = HttpClientProvider.GetHttpClient(executionParameters?.HttpClient ?? kernel.Services.GetService()); -#pragma warning restore CA2000 - - var openAIManifest = await DocumentLoader.LoadDocumentFromUriAsync( - uri, - kernel.LoggerFactory.CreateLogger(typeof(OpenAIPluginKernelExtensions)) ?? NullLogger.Instance, - httpClient, - null, // auth is not needed when loading the manifest - executionParameters?.UserAgent, - cancellationToken).ConfigureAwait(false); - - return await CreateAsync( - kernel, - openAIManifest, - pluginName, - executionParameters, - cancellationToken).ConfigureAwait(false); - } - - /// - /// Creates a plugin for an OpenAI plugin exposed through OpenAI's ChatGPT format. - /// - /// The containing services, plugins, and other state for use throughout the operation. - /// Plugin name. - /// A stream representing the AI Plugin - /// Plugin execution parameters. - /// The cancellation token. - /// A collection of invocable functions - public static async Task CreatePluginFromOpenAIAsync( - this Kernel kernel, - string pluginName, - Stream stream, - OpenAIFunctionExecutionParameters? 
executionParameters = null, - CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); - Verify.ValidPluginName(pluginName, kernel.Plugins); - - var openAIManifest = await DocumentLoader.LoadDocumentFromStreamAsync(stream).ConfigureAwait(false); - - return await CreateAsync( - kernel, - openAIManifest, - pluginName, - executionParameters, - cancellationToken: cancellationToken).ConfigureAwait(false); - } - - #region private - - private static async Task CreateAsync( - Kernel kernel, - string openAIManifest, - string pluginName, - OpenAIFunctionExecutionParameters? executionParameters = null, - CancellationToken cancellationToken = default) - { - JsonNode pluginJson; - OpenAIAuthenticationConfig openAIAuthConfig; - try - { - pluginJson = JsonNode.Parse(openAIManifest)!; - openAIAuthConfig = pluginJson["auth"].Deserialize(s_jsonOptionsCache)!; - } - catch (JsonException ex) - { - throw new KernelException("Parsing of Open AI manifest failed.", ex); - } - - if (executionParameters?.AuthCallback is not null) - { - var callback = executionParameters.AuthCallback; - ((OpenApiFunctionExecutionParameters)executionParameters).AuthCallback = async (request, ct) => - { - await callback(request, pluginName, openAIAuthConfig, ct).ConfigureAwait(false); - }; - } - - return await kernel.CreatePluginFromOpenApiAsync( - pluginName, - ParseOpenAIManifestForOpenApiSpecUrl(pluginJson), - executionParameters, - cancellationToken).ConfigureAwait(false); - } - - private static Uri ParseOpenAIManifestForOpenApiSpecUrl(JsonNode pluginJson) - { - string? apiType = pluginJson?["api"]?["type"]?.ToString(); - if (string.IsNullOrWhiteSpace(apiType) || apiType != "openapi") - { - throw new KernelException($"Unexpected API type '{apiType}' found in Open AI manifest."); - } - - string? 
apiUrl = pluginJson?["api"]?["url"]?.ToString(); - if (string.IsNullOrWhiteSpace(apiUrl)) - { - throw new KernelException("No Open API spec URL found in Open AI manifest."); - } - - try - { - return new Uri(apiUrl); - } - catch (System.UriFormatException ex) - { - throw new KernelException("Invalid Open API spec URI found in Open AI manifest.", ex); - } - } - - #endregion -} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs deleted file mode 100644 index b8d7d1015a3e..000000000000 --- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs +++ /dev/null @@ -1,19 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; - -namespace Microsoft.SemanticKernel.Plugins.OpenApi; - -/// -/// Represents a delegate that defines the method signature for asynchronously authenticating an HTTP request. -/// -/// The to authenticate. -/// The name of the plugin to be authenticated. -/// The used to authenticate. -/// The cancellation token. -/// A representing the asynchronous operation. -[Obsolete("This delegate is deprecated and will be removed in a future version.")] -public delegate Task OpenAIAuthenticateRequestAsyncCallback(HttpRequestMessage request, string pluginName, OpenAIAuthenticationConfig openAIAuthConfig, CancellationToken cancellationToken = default); diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs deleted file mode 100644 index 5d01bc083f3a..000000000000 --- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs +++ /dev/null @@ -1,100 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Plugins.OpenApi; - -/// -/// Represents the authentication section for an OpenAI plugin. -/// -[Obsolete("This class is deprecated and will be removed in a future version.")] -public class OpenAIAuthenticationConfig -{ - /// - /// The type of authentication. - /// - [JsonPropertyName("type")] - public OpenAIAuthenticationType Type { get; set; } = OpenAIAuthenticationType.None; - - /// - /// The type of authorization. - /// - [JsonPropertyName("authorization_type")] - public OpenAIAuthorizationType AuthorizationType { get; set; } - - /// - /// The client URL. - /// - [JsonPropertyName("client_url")] - public Uri? ClientUrl { get; set; } - - /// - /// The authorization URL. - /// - [JsonPropertyName("authorization_url")] - public Uri? AuthorizationUrl { get; set; } - - /// - /// The authorization content type. - /// - [JsonPropertyName("authorization_content_type")] - public string? AuthorizationContentType { get; set; } - - /// - /// The authorization scope. - /// - [JsonPropertyName("scope")] - public string? Scope { get; set; } - - /// - /// The verification tokens. - /// - [JsonPropertyName("verification_tokens")] - public Dictionary? VerificationTokens { get; set; } -} - -/// -/// Represents the type of authentication for an OpenAI plugin. -/// -[Obsolete("This enum is deprecated and will be removed in a future version.")] -public enum OpenAIAuthenticationType -{ - /// - /// No authentication. - /// - None, - - /// - /// User HTTP authentication. - /// - UserHttp, - - /// - /// Service HTTP authentication. - /// - ServiceHttp, - - /// - /// OAuth authentication. - /// - OAuth -} - -/// -/// Represents the type of authorization for an OpenAI plugin. -/// -[Obsolete("This enum is deprecated and will be removed in a future version.")] -public enum OpenAIAuthorizationType -{ - /// - /// Basic authorization. 
- /// - Basic, - - /// - /// Bearer authorization. - /// - Bearer -} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs deleted file mode 100644 index 5f04bec5c039..000000000000 --- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs +++ /dev/null @@ -1,31 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Net.Http; - -namespace Microsoft.SemanticKernel.Plugins.OpenApi; - -/// -/// OpenAI function execution parameters -/// -[Obsolete("This class is deprecated and will be removed in a future version.")] -public class OpenAIFunctionExecutionParameters : OpenApiFunctionExecutionParameters -{ - /// - /// Callback for adding Open AI authentication data to HTTP requests. - /// - public new OpenAIAuthenticateRequestAsyncCallback? AuthCallback { get; set; } - - /// - public OpenAIFunctionExecutionParameters( - HttpClient? httpClient = null, - OpenAIAuthenticateRequestAsyncCallback? authCallback = null, - Uri? serverUrlOverride = null, - string? 
userAgent = null, - bool ignoreNonCompliantErrors = false, - bool enableDynamicOperationPayload = true, - bool enablePayloadNamespacing = false) : base(httpClient, null, serverUrlOverride, userAgent, ignoreNonCompliantErrors, enableDynamicOperationPayload, enablePayloadNamespacing) - { - this.AuthCallback = authCallback; - } -} diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs index 2d6b856b4700..e0614f67e8fb 100644 --- a/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs +++ b/dotnet/src/Functions/Functions.OpenApi/OpenApi/OpenApiDocumentParser.cs @@ -158,11 +158,11 @@ private static List ExtractRestApiOperations(OpenApiDocument d { var result = new List(); - var serverUrl = document.Servers.FirstOrDefault()?.Url; + var server = document.Servers.FirstOrDefault(); foreach (var pathPair in document.Paths) { - var operations = CreateRestApiOperations(serverUrl, pathPair.Key, pathPair.Value, operationsToExclude, logger); + var operations = CreateRestApiOperations(server, pathPair.Key, pathPair.Value, operationsToExclude, logger); result.AddRange(operations); } @@ -173,15 +173,16 @@ private static List ExtractRestApiOperations(OpenApiDocument d /// /// Creates REST API operation. /// - /// The server url. + /// Rest server. /// Rest resource path. /// Rest resource metadata. /// Optional list of operations not to import, e.g. in case they are not supported /// Used to perform logging. /// Rest operation. - internal static List CreateRestApiOperations(string? serverUrl, string path, OpenApiPathItem pathItem, IList? operationsToExclude, ILogger logger) + internal static List CreateRestApiOperations(OpenApiServer? server, string path, OpenApiPathItem pathItem, IList? 
operationsToExclude, ILogger logger) { var operations = new List(); + var operationServer = CreateRestApiOperationServer(server); foreach (var operationPair in pathItem.Operations) { @@ -196,7 +197,7 @@ internal static List CreateRestApiOperations(string? serverUrl var operation = new RestApiOperation( operationItem.OperationId, - string.IsNullOrEmpty(serverUrl) ? null : new Uri(serverUrl), + operationServer, path, new HttpMethod(method), string.IsNullOrEmpty(operationItem.Description) ? operationItem.Summary : operationItem.Description, @@ -214,6 +215,16 @@ internal static List CreateRestApiOperations(string? serverUrl return operations; } + /// + /// Build a object from the given object. + /// + /// Represents the server which hosts the REST API. + private static RestApiOperationServer CreateRestApiOperationServer(OpenApiServer? server) + { + var variables = server?.Variables.ToDictionary(item => item.Key, item => new RestApiOperationServerVariable(item.Value.Default, item.Value.Description, item.Value.Enum)); + return new(server?.Url, variables); + } + /// /// Build a dictionary of extension key value pairs from the given open api extension model, where the key is the extension name /// and the value is either the actual value in the case of primitive types like string, int, date, etc, or a json string in the diff --git a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs index 8d4998207aec..5b98d4133961 100644 --- a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs +++ b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs @@ -214,7 +214,7 @@ private async Task SendAsync( try { - responseMessage = await this._httpClient.SendWithSuccessCheckAsync(requestMessage, cancellationToken).ConfigureAwait(false); + responseMessage = await this._httpClient.SendWithSuccessCheckAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, 
cancellationToken).ConfigureAwait(false); response = await this.ReadContentAndCreateOperationResponseAsync(requestMessage, responseMessage, payload, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj b/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj index b730d1c27025..77e97f711e7f 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/Functions.Prompty.UnitTests.csproj @@ -8,6 +8,7 @@ disable false $(NoWarn);CS1591;CA2007,CA1861,CA1869,VSTHRD111,SKEXP0040,SKEXP0010,SKEXP0001 + true @@ -24,16 +25,18 @@ all + - + Always + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs index 148e5a1d28b2..f86a93fbce70 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs @@ -8,6 +8,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.FileProviders; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextGeneration; @@ -60,8 +61,8 @@ public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettings() // Assert Assert.NotNull(executionSettings); Assert.Equal("gpt-35-turbo", executionSettings.ModelId); - Assert.Equal(1.0, executionSettings.Temperature); - Assert.Equal(1.0, executionSettings.TopP); + Assert.Null(executionSettings.Temperature); + Assert.Null(executionSettings.TopP); Assert.Null(executionSettings.StopSequences); Assert.Null(executionSettings.ResponseFormat); Assert.Null(executionSettings.TokenSelectionBiases); @@ -121,6 +122,69 @@ public void 
ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings() Assert.Empty(kernelFunction.ExecutionSettings!); } + [Fact] + public void ItShouldCreateFunctionFromPromptYamlWithEmbeddedFileProvider() + { + // Arrange + Kernel kernel = new(); + var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); + ManifestEmbeddedFileProvider manifestEmbeddedProvider = new(typeof(PromptyTest).Assembly); + + // Act + var kernelFunction = kernel.CreateFunctionFromPromptyFile(chatPromptyPath, + fileProvider: manifestEmbeddedProvider); + + // Assert + Assert.NotNull(kernelFunction); + + var executionSettings = kernelFunction.ExecutionSettings; + Assert.Single(executionSettings!); + Assert.True(executionSettings!.ContainsKey("default")); + } + + [Fact] + public void ItShouldCreateFunctionFromPromptYamlWithFileProvider() + { + // Arrange + Kernel kernel = new(); + var currentDirectory = Directory.GetCurrentDirectory(); + var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); + using PhysicalFileProvider fileProvider = new(currentDirectory); + + // Act + var kernelFunction = kernel.CreateFunctionFromPromptyFile(chatPromptyPath, + fileProvider); + + // Assert + Assert.NotNull(kernelFunction); + + var executionSettings = kernelFunction.ExecutionSettings; + Assert.Single(executionSettings!); + Assert.True(executionSettings!.ContainsKey("default")); + } + + [Fact] + public void ItShouldCreateFunctionFromPromptYamlWithFileInfo() + { + // Arrange + Kernel kernel = new(); + var currentDirectory = Directory.GetCurrentDirectory(); + var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); + using PhysicalFileProvider fileProvider = new(currentDirectory); + var fileInfo = fileProvider.GetFileInfo(chatPromptyPath); + + // Act + var kernelFunction = kernel.CreateFunctionFromPromptyFile( + fileInfo: fileInfo); + + // Assert + Assert.NotNull(kernelFunction); + + var executionSettings = kernelFunction.ExecutionSettings; + Assert.Single(executionSettings!); + 
Assert.True(executionSettings!.ContainsKey("default")); + } + [Fact] public void ItFailsToParseAnEmptyHeader() { diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty index a6be798dbf1a..ba095afeebfc 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/TestData/chatJsonObject.prompty @@ -11,6 +11,7 @@ model: parameters: temperature: 0.0 max_tokens: 3000 + top_p: 1.0 response_format: type: json_object diff --git a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs index 67778bf4c912..19e883cb9b60 100644 --- a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs @@ -2,6 +2,7 @@ using System; using System.IO; +using Microsoft.Extensions.FileProviders; using Microsoft.SemanticKernel.Prompty; namespace Microsoft.SemanticKernel; @@ -64,4 +65,59 @@ public static KernelFunction CreateFunctionFromPrompty( promptTemplateFactory ?? KernelFunctionPrompty.s_defaultTemplateFactory, kernel.LoggerFactory); } + + /// + /// Create a from a prompty template file. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// Path to the file containing the Prompty representation of a prompt based . + /// The representation of the file system to use to retrieve the prompty file. Defaults to scoped to the current directory. + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a will be used with support for Liquid and Handlebars prompt templates. + /// + /// The created . + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. 
+ public static KernelFunction CreateFunctionFromPromptyFile( + this Kernel kernel, + string promptyFilePath, + IFileProvider fileProvider, + IPromptTemplateFactory? promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNullOrWhiteSpace(promptyFilePath); + Verify.NotNull(fileProvider); + + var fileInfo = fileProvider.GetFileInfo(promptyFilePath); + return CreateFunctionFromPromptyFile(kernel, fileInfo, promptTemplateFactory); + } + + /// + /// Create a from a prompty template file. + /// + /// The containing services, plugins, and other state for use throughout the operation. + /// The file containing the Prompty representation of a prompt based . + /// + /// The to use when interpreting the prompt template configuration into a . + /// If null, a will be used with support for Liquid and Handlebars prompt templates. + /// + /// The created . + /// is null. + /// is null. + /// path is not found. + public static KernelFunction CreateFunctionFromPromptyFile( + this Kernel kernel, + IFileInfo fileInfo, + IPromptTemplateFactory? 
promptTemplateFactory = null) + { + Verify.NotNull(kernel); + Verify.NotNull(fileInfo); + Verify.True(fileInfo.Exists, $"The file '{fileInfo.PhysicalPath}' doesn't exist."); + + using StreamReader reader = new(fileInfo.CreateReadStream()); + var promptyTemplate = reader.ReadToEnd(); + return kernel.CreateFunctionFromPrompty(promptyTemplate, promptTemplateFactory); + } } diff --git a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj index f340015d4a5d..7a63018ef572 100644 --- a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj +++ b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj @@ -19,5 +19,6 @@ + \ No newline at end of file diff --git a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj index 178dd4860a24..95f3ddc30b1d 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj +++ b/dotnet/src/Functions/Functions.UnitTests/Functions.UnitTests.csproj @@ -54,7 +54,7 @@ - + diff --git a/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs b/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs index a277284f3ccc..48b98eaa5bef 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Markdown/Functions/KernelFunctionMarkdownTests.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System.Linq; using Microsoft.SemanticKernel; using Xunit; @@ -18,9 +19,91 @@ public void ItShouldCreatePromptFunctionConfigFromMarkdown() Assert.NotNull(model); Assert.Equal("TellMeAbout", model.Name); Assert.Equal("Hello AI, tell me about {{$input}}", model.Template); - Assert.Equal(2, model.ExecutionSettings.Count); + Assert.Equal(6, model.ExecutionSettings.Count); Assert.Equal("gpt4", model.ExecutionSettings["service1"].ModelId); Assert.Equal("gpt3.5", model.ExecutionSettings["service2"].ModelId); + Assert.Equal("gpt3.5-turbo", model.ExecutionSettings["service3"].ModelId); + Assert.Equal("gpt4", model.ExecutionSettings["service4"].ModelId); + Assert.Equal("gpt4", model.ExecutionSettings["service5"].ModelId); + Assert.Equal("gpt4", model.ExecutionSettings["service6"].ModelId); + } + + [Fact] + public void ItShouldInitializeFunctionChoiceBehaviorsFromMarkdown() + { + // Arrange + var kernel = new Kernel(); + kernel.Plugins.AddFromFunctions("p1", [KernelFunctionFactory.CreateFromMethod(() => { }, "f1")]); + kernel.Plugins.AddFromFunctions("p2", [KernelFunctionFactory.CreateFromMethod(() => { }, "f2")]); + kernel.Plugins.AddFromFunctions("p3", [KernelFunctionFactory.CreateFromMethod(() => { }, "f3")]); + + // Act + var function = KernelFunctionMarkdown.CreateFromPromptMarkdown(Markdown, "TellMeAbout"); + + // Assert + Assert.NotNull(function); + Assert.NotEmpty(function.ExecutionSettings); + + Assert.Equal(6, function.ExecutionSettings.Count); + + // AutoFunctionCallChoice for service1 + var service1ExecutionSettings = function.ExecutionSettings["service1"]; + Assert.NotNull(service1ExecutionSettings?.FunctionChoiceBehavior); + + var autoConfig = service1ExecutionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: []) { Kernel = kernel }); + Assert.NotNull(autoConfig); + Assert.Equal(FunctionChoice.Auto, autoConfig.Choice); + Assert.NotNull(autoConfig.Functions); + Assert.Equal("p1", 
autoConfig.Functions.Single().PluginName); + Assert.Equal("f1", autoConfig.Functions.Single().Name); + + // RequiredFunctionCallChoice for service2 + var service2ExecutionSettings = function.ExecutionSettings["service2"]; + Assert.NotNull(service2ExecutionSettings?.FunctionChoiceBehavior); + + var requiredConfig = service2ExecutionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: []) { Kernel = kernel }); + Assert.NotNull(requiredConfig); + Assert.Equal(FunctionChoice.Required, requiredConfig.Choice); + Assert.NotNull(requiredConfig.Functions); + Assert.Equal("p2", requiredConfig.Functions.Single().PluginName); + Assert.Equal("f2", requiredConfig.Functions.Single().Name); + + // NoneFunctionCallChoice for service3 + var service3ExecutionSettings = function.ExecutionSettings["service3"]; + Assert.NotNull(service3ExecutionSettings?.FunctionChoiceBehavior); + + var noneConfig = service3ExecutionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: []) { Kernel = kernel }); + Assert.NotNull(noneConfig); + Assert.Equal(FunctionChoice.None, noneConfig.Choice); + Assert.NotNull(noneConfig.Functions); + Assert.Equal("p3", noneConfig.Functions.Single().PluginName); + Assert.Equal("f3", noneConfig.Functions.Single().Name); + + // AutoFunctionCallChoice with empty functions collection for service4 + var service4ExecutionSettings = function.ExecutionSettings["service4"]; + Assert.NotNull(service4ExecutionSettings?.FunctionChoiceBehavior); + + var autoWithEmptyFunctionsConfig = service4ExecutionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: []) { Kernel = kernel }); + Assert.NotNull(autoWithEmptyFunctionsConfig); + Assert.Equal(FunctionChoice.Auto, autoWithEmptyFunctionsConfig.Choice); + Assert.Null(autoWithEmptyFunctionsConfig.Functions); + + // AutoFunctionCallChoice with no functions collection for 
service5 + var service5ExecutionSettings = function.ExecutionSettings["service5"]; + Assert.NotNull(service5ExecutionSettings?.FunctionChoiceBehavior); + + var autoWithNoFunctionsConfig = service5ExecutionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: []) { Kernel = kernel }); + Assert.NotNull(autoWithNoFunctionsConfig); + Assert.Equal(FunctionChoice.Auto, autoWithNoFunctionsConfig.Choice); + Assert.NotNull(autoWithNoFunctionsConfig.Functions); + Assert.Equal(3, autoWithNoFunctionsConfig.Functions.Count); + Assert.Contains(autoWithNoFunctionsConfig.Functions, f => f.PluginName == "p1" && f.Name == "f1"); + Assert.Contains(autoWithNoFunctionsConfig.Functions, f => f.PluginName == "p2" && f.Name == "f2"); + Assert.Contains(autoWithNoFunctionsConfig.Functions, f => f.PluginName == "p3" && f.Name == "f3"); + + // No function choice behavior for service6 + var service6ExecutionSettings = function.ExecutionSettings["service6"]; + Assert.Null(service6ExecutionSettings?.FunctionChoiceBehavior); } [Fact] @@ -47,7 +130,11 @@ These are AI execution settings { "service1" : { "model_id": "gpt4", - "temperature": 0.7 + "temperature": 0.7, + "function_choice_behavior": { + "type": "auto", + "functions": ["p1.f1"] + } } } ``` @@ -56,7 +143,58 @@ These are more AI execution settings { "service2" : { "model_id": "gpt3.5", - "temperature": 0.8 + "temperature": 0.8, + "function_choice_behavior": { + "type": "required", + "functions": ["p2.f2"] + } + } + } + ``` + These are AI execution settings as well + ```sk.execution_settings + { + "service3" : { + "model_id": "gpt3.5-turbo", + "temperature": 0.8, + "function_choice_behavior": { + "type": "none", + "functions": ["p3.f3"] + } + } + } + ``` + These are AI execution settings + ```sk.execution_settings + { + "service4" : { + "model_id": "gpt4", + "temperature": 0.7, + "function_choice_behavior": { + "type": "auto", + "functions": [] + } + } + } + ``` + These are AI execution 
settings + ```sk.execution_settings + { + "service5" : { + "model_id": "gpt4", + "temperature": 0.7, + "function_choice_behavior": { + "type": "auto" + } + } + } + ``` + These are AI execution settings + ```sk.execution_settings + { + "service6" : { + "model_id": "gpt4", + "temperature": 0.7 } } ``` diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs index 022a12d95719..2a79ee3bcc33 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Extensions/RestApiOperationExtensionsTests.cs @@ -256,7 +256,7 @@ private static RestApiOperation CreateTestOperation(string method, RestApiOperat { return new RestApiOperation( id: "fake-id", - serverUrl: url, + server: new(url?.AbsoluteUri), path: "fake-path", method: new HttpMethod(method), description: "fake-description", diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs deleted file mode 100644 index 7c00e7ba375d..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs +++ /dev/null @@ -1,95 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.IO; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Plugins.OpenApi; -using Moq; -using SemanticKernel.Functions.UnitTests.OpenApi.TestPlugins; -using Xunit; - -namespace SemanticKernel.Functions.UnitTests.OpenApi.OpenAI; - -[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")] -public sealed class KernelOpenAIPluginExtensionsTests : IDisposable -{ - /// - /// OpenAPI document stream. - /// - private readonly Stream _openApiDocument; - - /// - /// Kernel instance. - /// - private readonly Kernel _kernel; - - /// - /// Creates an instance of a class. - /// - public KernelOpenAIPluginExtensionsTests() - { - this._kernel = new Kernel(); - - this._openApiDocument = ResourcePluginsProvider.LoadFromResource("documentV2_0.json"); - } - - [Fact] - public async Task ItUsesOauthFromOpenAiPluginManifestWhenFetchingOpenApiSpecAsync() - { - await this.ItRunsTestAsync("ai-plugin.json"); - } - - [Fact] - public async Task ItUsesHttpAuthFromOpenAiPluginManifestWhenFetchingOpenApiSpecAsync() - { - await this.ItRunsTestAsync("ai-plugin2.json"); - } - - private async Task ItRunsTestAsync(string resourceName) - { - //Arrange - using var reader = new StreamReader(ResourcePluginsProvider.LoadFromResource(resourceName), Encoding.UTF8); - JsonNode openAIDocumentContent = JsonNode.Parse(await reader.ReadToEndAsync())!; - var actualOpenAIAuthConfig = - openAIDocumentContent["auth"].Deserialize( - new JsonSerializerOptions - { - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }, - })!; - - using var openAiDocument = ResourcePluginsProvider.LoadFromResource(resourceName); - using var messageHandlerStub = new HttpMessageHandlerStub(this._openApiDocument); - - using var httpClient = new 
HttpClient(messageHandlerStub, false); - var authCallbackMock = new Mock(); - var executionParameters = new OpenAIFunctionExecutionParameters { HttpClient = httpClient, AuthCallback = authCallbackMock.Object }; - - var pluginName = "fakePlugin"; - - //Act - var plugin = await this._kernel.ImportPluginFromOpenAIAsync(pluginName, openAiDocument, executionParameters); - - //Assert - var setSecretFunction = plugin["SetSecret"]; - Assert.NotNull(setSecretFunction); - - authCallbackMock.Verify(target => target.Invoke( - It.IsAny(), - It.Is(expectedPluginName => expectedPluginName == pluginName), - It.Is(expectedOpenAIAuthConfig => expectedOpenAIAuthConfig.Scope == actualOpenAIAuthConfig!.Scope), - It.IsAny()), - Times.Exactly(1)); - } - - public void Dispose() - { - this._openApiDocument.Dispose(); - } -} diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs index 1e3109c0c1ff..74b16b14e096 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV20Tests.cs @@ -103,7 +103,7 @@ public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() var putOperation = restApi.Operations.Single(o => o.Id == "SetSecret"); Assert.NotNull(putOperation); Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal("https://my-key-vault.vault.azure.net", putOperation.Server.Url); Assert.Equal(HttpMethod.Put, putOperation.Method); Assert.Equal("/secrets/{secret-name}", putOperation.Path); @@ -266,7 +266,7 @@ public async Task ItCanWorkWithDocumentsWithoutHostAndSchemaAttributesAsync() var restApi = await this._sut.ParseAsync(stream); //Assert - Assert.All(restApi.Operations, (op) => Assert.Null(op.ServerUrl)); + 
Assert.All(restApi.Operations, (op) => Assert.Null(op.Server.Url)); } [Fact] diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs index cb9eec5eb508..c2056ecf4606 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV30Tests.cs @@ -104,7 +104,7 @@ public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() var putOperation = restApi.Operations.Single(o => o.Id == "SetSecret"); Assert.NotNull(putOperation); Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal("https://my-key-vault.vault.azure.net", putOperation.Server.Url); Assert.Equal(HttpMethod.Put, putOperation.Method); Assert.Equal("/secrets/{secret-name}", putOperation.Path); @@ -289,7 +289,7 @@ public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() var restApi = await this._sut.ParseAsync(stream); //Assert - Assert.All(restApi.Operations, (op) => Assert.Null(op.ServerUrl)); + Assert.All(restApi.Operations, (op) => Assert.Null(op.Server.Url)); } [Fact] @@ -305,7 +305,7 @@ public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() var restApi = await this._sut.ParseAsync(stream); //Assert - Assert.All(restApi.Operations, (op) => Assert.Null(op.ServerUrl)); + Assert.All(restApi.Operations, (op) => Assert.Null(op.Server.Url)); } [Theory] diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs index 60e182f1bfc6..54c1f1492f84 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs +++ 
b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenApiDocumentParserV31Tests.cs @@ -104,7 +104,7 @@ public async Task ItCanParsePutOperationMetadataSuccessfullyAsync() var putOperation = restApi.Operations.Single(o => o.Id == "SetSecret"); Assert.NotNull(putOperation); Assert.Equal("Sets a secret in a specified key vault.", putOperation.Description); - Assert.Equal("https://my-key-vault.vault.azure.net/", putOperation.ServerUrl?.AbsoluteUri); + Assert.Equal("https://my-key-vault.vault.azure.net", putOperation.Server.Url); Assert.Equal(HttpMethod.Put, putOperation.Method); Assert.Equal("/secrets/{secret-name}", putOperation.Path); @@ -266,7 +266,7 @@ public async Task ItCanWorkWithDocumentsWithoutServersAttributeAsync() var restApi = await this._sut.ParseAsync(stream); //Assert - Assert.All(restApi.Operations, (op) => Assert.Null(op.ServerUrl)); + Assert.All(restApi.Operations, (op) => Assert.Null(op.Server.Url)); } [Fact] @@ -282,7 +282,7 @@ public async Task ItCanWorkWithDocumentsWithEmptyServersAttributeAsync() var restApi = await this._sut.ParseAsync(stream); //Assert - Assert.All(restApi.Operations, (op) => Assert.Null(op.ServerUrl)); + Assert.All(restApi.Operations, (op) => Assert.Null(op.Server.Url)); } [Theory] diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs index 6d72c632e6d7..9bbd49a287d3 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs @@ -66,7 +66,7 @@ public async Task ItCanRunCreateAndUpdateOperationsWithJsonPayloadSuccessfullyAs var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", httpMethod, "fake-description", @@ -144,7 +144,7 @@ public async Task 
ItCanRunCreateAndUpdateOperationsWithPlainTextPayloadSuccessfu var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", httpMethod, "fake-description", @@ -210,7 +210,7 @@ public async Task ItShouldAddHeadersToHttpRequestAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -275,7 +275,7 @@ public async Task ItShouldAddUserAgentHeaderToHttpRequestIfConfiguredAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -320,7 +320,7 @@ public async Task ItShouldBuildJsonPayloadDynamicallyAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -384,7 +384,7 @@ public async Task ItShouldBuildJsonPayloadDynamicallyUsingPayloadMetadataDataTyp var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -473,7 +473,7 @@ public async Task ItShouldBuildJsonPayloadDynamicallyResolvingArgumentsByFullNam var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -543,7 +543,7 @@ public async Task ItShouldThrowExceptionIfPayloadMetadataDoesNotHaveContentTypeA // Arrange var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new 
RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -570,7 +570,7 @@ public async Task ItShouldThrowExceptionIfContentTypeArgumentIsNotProvidedAsync( // Arrange var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -601,7 +601,7 @@ public async Task ItShouldUsePayloadArgumentForPlainTextContentTypeWhenBuildingP var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -641,7 +641,7 @@ public async Task ItShouldUsePayloadAndContentTypeArgumentsIfDynamicPayloadBuild var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -687,7 +687,7 @@ public async Task ItShouldBuildJsonPayloadDynamicallyExcludingOptionalParameters var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -733,7 +733,7 @@ public async Task ItShouldBuildJsonPayloadDynamicallyIncludingOptionalParameters var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -788,7 +788,7 @@ public async Task ItShouldAddRequiredQueryStringParametersIfTheirArgumentsProvid var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -836,7 +836,7 @@ public async Task 
ItShouldAddNotRequiredQueryStringParametersIfTheirArgumentsPro var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -884,7 +884,7 @@ public async Task ItShouldSkipNotRequiredQueryStringParametersIfNoArgumentsProvi var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -923,7 +923,7 @@ public async Task ItShouldThrowExceptionIfNoArgumentProvidedForRequiredQueryStri var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -954,7 +954,7 @@ public async Task ItShouldReadContentAsStringSuccessfullyAsync(string contentTyp var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -996,7 +996,7 @@ public async Task ItShouldReadContentAsBytesSuccessfullyAsync(string contentType var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1031,7 +1031,7 @@ public async Task ItShouldThrowExceptionForUnsupportedContentTypeAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1074,7 +1074,7 @@ public async Task ItShouldReturnRequestUriAndContentAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new 
RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1122,7 +1122,7 @@ public async Task ItShouldHandleNoContentAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1170,7 +1170,7 @@ public async Task ItShouldSetHttpRequestMessageOptionsAsync() var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1214,7 +1214,7 @@ public async Task ItShouldIncludeRequestDataWhenOperationCanceledExceptionIsThro var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Post, "fake-description", @@ -1244,7 +1244,7 @@ public async Task ItShouldUseCustomHttpResponseContentReaderAsync() // Arrange var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -1278,7 +1278,7 @@ public async Task ItShouldUseDefaultHttpResponseContentReaderIfCustomDoesNotRetu var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -1310,7 +1310,7 @@ public async Task ItShouldDisposeContentStreamAndHttpResponseContentMessageAsync // Arrange var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", @@ -1397,7 +1397,7 @@ public async Task ItShouldReturnExpectedSchemaAsync(string 
expectedStatusCode, p { var operation = new RestApiOperation( "fake-id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path", HttpMethod.Get, "fake-description", diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs index c9f082b329a3..0a9099a34d8e 100644 --- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs @@ -8,6 +8,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.Plugins.OpenApi; using Microsoft.SemanticKernel.TextGeneration; @@ -23,7 +24,7 @@ public void ItShouldUseHostUrlIfNoOverrideProvided() // Arrange var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "/", HttpMethod.Get, "fake_description", @@ -45,7 +46,7 @@ public void ItShouldUseHostUrlOverrideIfProvided() // Arrange var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "/", HttpMethod.Get, "fake_description", @@ -86,7 +87,7 @@ public void ItShouldBuildOperationUrlWithPathParametersFromArguments() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "/{p1}/{p2}/other_fake_path_section", HttpMethod.Get, "fake_description", @@ -129,7 +130,7 @@ public void ItShouldBuildOperationUrlWithEncodedArguments() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new 
RestApiOperationServer("https://fake-random-test-host"), "/{p1}/{p2}/other_fake_path_section", HttpMethod.Get, "fake_description", @@ -171,7 +172,7 @@ public void ShouldBuildResourceUrlWithoutQueryString() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "{fake-path}/", HttpMethod.Get, "fake_description", @@ -212,7 +213,7 @@ public void ItShouldBuildQueryString() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path/", HttpMethod.Get, "fake_description", @@ -246,7 +247,7 @@ public void ItShouldBuildQueryStringWithQuotes() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path/", HttpMethod.Get, "fake_description", @@ -279,7 +280,7 @@ public void ItShouldBuildQueryStringForArray() var sut = new RestApiOperation( "fake_id", - new Uri("https://fake-random-test-host"), + new RestApiOperationServer("https://fake-random-test-host"), "fake-path/", HttpMethod.Get, "fake_description", @@ -326,7 +327,7 @@ public void ItShouldRenderHeaderValuesFromArguments() { "fake_header_two", "fake_header_two_value" } }; - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", parameters); + var sut = new RestApiOperation("fake_id", new RestApiOperationServer("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", parameters); // Act var headers = sut.BuildHeaders(arguments); @@ -351,7 +352,7 @@ public void ShouldThrowExceptionIfNoValueProvidedForRequiredHeader() new(name: "fake_header_two", type : "string", isRequired : false, expand : false, location : RestApiOperationParameterLocation.Header, style: RestApiOperationParameterStyle.Simple) }; - var sut = new RestApiOperation("fake_id", new 
Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); + var sut = new RestApiOperation("fake_id", new RestApiOperationServer("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); // Act void Act() => sut.BuildHeaders(new Dictionary()); @@ -375,7 +376,7 @@ public void ItShouldSkipOptionalHeaderHavingNoValue() ["fake_header_one"] = "fake_header_one_value" }; - var sut = new RestApiOperation("fake_id", new Uri("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); + var sut = new RestApiOperation("fake_id", new RestApiOperationServer("http://fake_url"), "fake_path", HttpMethod.Get, "fake_description", metadata); // Act var headers = sut.BuildHeaders(arguments); @@ -403,7 +404,7 @@ public void ItShouldCreateHeaderWithCommaSeparatedValues() ["h2"] = "[1,2,3]" }; - var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + var sut = new RestApiOperation("fake_id", new RestApiOperationServer("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); // Act var headers = sut.BuildHeaders(arguments); @@ -432,7 +433,7 @@ public void ItShouldCreateHeaderWithPrimitiveValue() ["h2"] = true }; - var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + var sut = new RestApiOperation("fake_id", new RestApiOperationServer("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); // Act var headers = sut.BuildHeaders(arguments); @@ -461,7 +462,7 @@ public void ItShouldMixAndMatchHeadersOfDifferentValueTypes() ["h2"] = "false" }; - var sut = new RestApiOperation("fake_id", new Uri("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); + var sut = new RestApiOperation("fake_id", new 
RestApiOperationServer("https://fake-random-test-host"), "fake_path", HttpMethod.Get, "fake_description", metadata); // Act var headers = sut.BuildHeaders(arguments); @@ -568,7 +569,7 @@ public void ItBuildsServicesIntoKernel() { var builder = Kernel.CreateBuilder() .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") - .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); builder.Services.AddSingleton(CultureInfo.InvariantCulture); builder.Services.AddSingleton(CultureInfo.CurrentCulture); @@ -577,10 +578,10 @@ public void ItBuildsServicesIntoKernel() Kernel kernel = builder.Build(); Assert.IsType(kernel.GetRequiredService("openai")); - Assert.IsType(kernel.GetRequiredService("azureopenai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); Assert.Equal(2, kernel.GetAllServices().Count()); - Assert.Single(kernel.GetAllServices()); + Assert.Equal(2, kernel.GetAllServices().Count()); Assert.Equal(3, kernel.GetAllServices().Count()); } @@ -701,4 +702,72 @@ public void ItAddsTheRightTypesInAddKernel() Assert.NotNull(provider.GetService()); Assert.NotNull(provider.GetService()); } + + [Fact] + public void ItShouldUseDefaultServerVariableIfNoOverrideProvided() + { + // Arrange + var sut = new RestApiOperation( + "fake_id", + new RestApiOperationServer("https://example.com/{version}", new Dictionary { { "version", new RestApiOperationServerVariable("v2") } }), + "/items", + HttpMethod.Get, + "fake_description", + [] + ); + + var arguments = new Dictionary(); + + // Act + var url = sut.BuildOperationUrl(arguments); + + // Assert + Assert.Equal("https://example.com/v2/items", url.OriginalString); + } + + [Fact] + public void ItShouldUseDefaultServerVariableIfInvalidOverrideProvided() + { + // Arrange + var version = 
new RestApiOperationServerVariable("v2", null, ["v1", "v2"]); + var sut = new RestApiOperation( + "fake_id", + new RestApiOperationServer("https://example.com/{version}", new Dictionary { { "version", version } }), + "/items", + HttpMethod.Get, + "fake_description", + [] + ); + + var arguments = new Dictionary() { { "version", "v3" } }; + + // Act + var url = sut.BuildOperationUrl(arguments); + + // Assert + Assert.Equal("https://example.com/v2/items", url.OriginalString); + } + + [Fact] + public void ItShouldUseServerVariableOverrideIfProvided() + { + // Arrange + var version = new RestApiOperationServerVariable("v2", null, ["v1", "v2", "v3"]); + var sut = new RestApiOperation( + "fake_id", + new RestApiOperationServer("https://example.com/{version}", new Dictionary { { "version", version } }), + "/items", + HttpMethod.Get, + "fake_description", + [] + ); + + var arguments = new Dictionary() { { "version", "v3" } }; + + // Act + var url = sut.BuildOperationUrl(arguments); + + // Assert + Assert.Equal("https://example.com/v3/items", url.OriginalString); + } } diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs b/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs index 30bce2a3fac2..16f048955056 100644 --- a/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs +++ b/dotnet/src/Functions/Functions.UnitTests/Yaml/Functions/KernelFunctionYamlTests.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Linq; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; using Xunit; @@ -12,9 +13,15 @@ namespace SemanticKernel.Functions.UnitTests.Yaml; public class KernelFunctionYamlTests { private readonly ISerializer _serializer; + private readonly Kernel _kernel; public KernelFunctionYamlTests() { + this._kernel = new Kernel(); + this._kernel.Plugins.AddFromFunctions("p1", [KernelFunctionFactory.CreateFromMethod(() => { }, "f1")]); + this._kernel.Plugins.AddFromFunctions("p2", [KernelFunctionFactory.CreateFromMethod(() => { }, "f2")]); + this._kernel.Plugins.AddFromFunctions("p3", [KernelFunctionFactory.CreateFromMethod(() => { }, "f3")]); + this._serializer = new SerializerBuilder() .WithNamingConvention(UnderscoredNamingConvention.Instance) .Build(); @@ -68,7 +75,7 @@ public void ItShouldSupportCreatingOpenAIExecutionSettings() // Arrange var deserializer = new DeserializerBuilder() .WithNamingConvention(UnderscoredNamingConvention.Instance) - .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) + .WithTypeConverter(new PromptExecutionSettingsTypeConverter()) .Build(); var promptFunctionModel = deserializer.Deserialize(this._yaml); @@ -82,6 +89,69 @@ public void ItShouldSupportCreatingOpenAIExecutionSettings() Assert.Equal(0.0, executionSettings.TopP); } + [Fact] + public void ItShouldDeserializeAutoFunctionChoiceBehaviors() + { + // Act + var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml); + + // Assert + Assert.NotNull(promptTemplateConfig?.ExecutionSettings); + + // Service with auto function choice behavior + var executionSettings = promptTemplateConfig.ExecutionSettings["service1"]; + Assert.NotNull(executionSettings?.FunctionChoiceBehavior); + + var config = executionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext([]) { Kernel = this._kernel }); + Assert.NotNull(config); + Assert.Equal(FunctionChoice.Auto, 
config.Choice); + Assert.NotNull(config.Functions); + Assert.Equal("p1", config.Functions.Single().PluginName); + Assert.Equal("f1", config.Functions.Single().Name); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviors() + { + // Act + var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml); + + // Assert + Assert.NotNull(promptTemplateConfig?.ExecutionSettings); + + // Service with required function choice behavior + var executionSettings = promptTemplateConfig.ExecutionSettings["service2"]; + Assert.NotNull(executionSettings?.FunctionChoiceBehavior); + + var config = executionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext([]) { Kernel = this._kernel }); + Assert.NotNull(config); + Assert.Equal(FunctionChoice.Required, config.Choice); + Assert.NotNull(config.Functions); + Assert.Equal("p2", config.Functions.Single().PluginName); + Assert.Equal("f2", config.Functions.Single().Name); + } + + [Fact] + public void ItShouldDeserializeNoneFunctionChoiceBehaviors() + { + // Act + var promptTemplateConfig = KernelFunctionYaml.ToPromptTemplateConfig(this._yaml); + + // Assert + Assert.NotNull(promptTemplateConfig?.ExecutionSettings); + + // Service with none function choice behavior + var executionSettings = promptTemplateConfig.ExecutionSettings["service3"]; + Assert.NotNull(executionSettings?.FunctionChoiceBehavior); + + var noneConfig = executionSettings.FunctionChoiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext([]) { Kernel = this._kernel }); + Assert.NotNull(noneConfig); + Assert.Equal(FunctionChoice.None, noneConfig.Choice); + Assert.NotNull(noneConfig.Functions); + Assert.Equal("p3", noneConfig.Functions.Single().PluginName); + Assert.Equal("f3", noneConfig.Functions.Single().Name); + } + [Fact] public void ItShouldCreateFunctionWithDefaultValueOfStringType() { @@ -157,6 +227,10 @@ string CreateYaml(object defaultValue) frequency_penalty: 
0.0 max_tokens: 256 stop_sequences: [] + function_choice_behavior: + type: auto + functions: + - p1.f1 service2: model_id: gpt-3.5 temperature: 1.0 @@ -165,6 +239,22 @@ string CreateYaml(object defaultValue) frequency_penalty: 0.0 max_tokens: 256 stop_sequences: [ "foo", "bar", "baz" ] + function_choice_behavior: + type: required + functions: + - p2.f2 + service3: + model_id: gpt-3.5 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [ "foo", "bar", "baz" ] + function_choice_behavior: + type: none + functions: + - p3.f3 """; private readonly string _yamlWithCustomSettings = """ diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs deleted file mode 100644 index 140de66fdaa8..000000000000 --- a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsNodeDeserializerTests.cs +++ /dev/null @@ -1,68 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using Microsoft.SemanticKernel; -using Xunit; -using YamlDotNet.Serialization; -using YamlDotNet.Serialization.NamingConventions; - -namespace SemanticKernel.Functions.UnitTests.Yaml; - -/// -/// Tests for . 
-/// -public sealed class PromptExecutionSettingsNodeDeserializerTests -{ - [Fact] - public void ItShouldCreatePromptFunctionFromYamlWithCustomModelSettings() - { - // Arrange - var deserializer = new DeserializerBuilder() - .WithNamingConvention(UnderscoredNamingConvention.Instance) - .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) - .Build(); - - // Act - var semanticFunctionConfig = deserializer.Deserialize(this._yaml); - - // Assert - Assert.NotNull(semanticFunctionConfig); - Assert.Equal("SayHello", semanticFunctionConfig.Name); - Assert.Equal("Say hello to the specified person using the specified language", semanticFunctionConfig.Description); - Assert.Equal(2, semanticFunctionConfig.InputVariables.Count); - Assert.Equal("language", semanticFunctionConfig.InputVariables[1].Name); - Assert.Equal(2, semanticFunctionConfig.ExecutionSettings.Count); - Assert.Equal("gpt-4", semanticFunctionConfig.ExecutionSettings["service1"].ModelId); - Assert.Equal("gpt-3.5", semanticFunctionConfig.ExecutionSettings["service2"].ModelId); - } - - private readonly string _yaml = """ - template_format: semantic-kernel - template: Say hello world to {{$name}} in {{$language}} - description: Say hello to the specified person using the specified language - name: SayHello - input_variables: - - name: name - description: The name of the person to greet - default: John - - name: language - description: The language to generate the greeting in - default: English - execution_settings: - service1: - model_id: gpt-4 - temperature: 1.0 - top_p: 0.0 - presence_penalty: 0.0 - frequency_penalty: 0.0 - max_tokens: 256 - stop_sequences: [] - service2: - model_id: gpt-3.5 - temperature: 1.0 - top_p: 0.0 - presence_penalty: 0.0 - frequency_penalty: 0.0 - max_tokens: 256 - stop_sequences: [ "foo", "bar", "baz" ] - """; -} diff --git a/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs 
b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs new file mode 100644 index 000000000000..d8c927393ca4 --- /dev/null +++ b/dotnet/src/Functions/Functions.UnitTests/Yaml/PromptExecutionSettingsTypeConverterTests.cs @@ -0,0 +1,356 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Xunit; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace SemanticKernel.Functions.UnitTests.Yaml; + +/// +/// Tests for . +/// +public sealed class PromptExecutionSettingsTypeConverterTests +{ + private readonly IDeserializer _deserializer; + + private readonly Kernel _kernel; + + public PromptExecutionSettingsTypeConverterTests() + { + this._deserializer = new DeserializerBuilder() + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .WithTypeConverter(new PromptExecutionSettingsTypeConverter()) + .Build(); + + this._kernel = new Kernel(); + this._kernel.Plugins.Add(GetTestPlugin()); + } + + [Fact] + public void ItShouldCreatePromptFunctionFromYamlWithCustomModelSettings() + { + // Act + var semanticFunctionConfig = this._deserializer.Deserialize(this._yaml); + + // Assert + Assert.NotNull(semanticFunctionConfig); + Assert.Equal("SayHello", semanticFunctionConfig.Name); + Assert.Equal("Say hello to the specified person using the specified language", semanticFunctionConfig.Description); + Assert.Equal(2, semanticFunctionConfig.InputVariables.Count); + Assert.Equal("language", semanticFunctionConfig.InputVariables[1].Name); + Assert.Equal(3, semanticFunctionConfig.ExecutionSettings.Count); + Assert.Equal("gpt-4", semanticFunctionConfig.ExecutionSettings["service1"].ModelId); + Assert.Equal("gpt-3.5", semanticFunctionConfig.ExecutionSettings["service2"].ModelId); + Assert.Equal("gpt-3.5-turbo", semanticFunctionConfig.ExecutionSettings["service3"].ModelId); + } + + [Fact] + public void 
ItShouldDeserializeAutoFunctionChoiceBehaviorFromYamlWithNoFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: auto + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromYamlWithEmptyFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: auto + functions: [] + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromYamlWithSpecifiedFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: auto + functions: + - MyPlugin.Function1 + - MyPlugin.Function3 + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + 
Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromYamlWithNoFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: required + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromYamlWithEmptyFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: required + functions: [] + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromYamlWithSpecifiedFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: required + 
functions: + - MyPlugin.Function1 + - MyPlugin.Function3 + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromYamlWithNoFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: none + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromYamlWithEmptyFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: none + functions: [] + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + 
Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromYamlWithSpecifiedFunctionsProperty() + { + // Arrange + var yaml = """ + function_choice_behavior: + type: none + functions: + - MyPlugin.Function1 + - MyPlugin.Function3 + """; + + var executionSettings = this._deserializer.Deserialize(yaml); + + // Act + var config = executionSettings!.FunctionChoiceBehavior!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + private readonly string _yaml = """ + template_format: semantic-kernel + template: Say hello world to {{$name}} in {{$language}} + description: Say hello to the specified person using the specified language + name: SayHello + input_variables: + - name: name + description: The name of the person to greet + default: John + - name: language + description: The language to generate the greeting in + default: English + execution_settings: + service1: + model_id: gpt-4 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [] + function_choice_behavior: + type: auto + functions: + - p1.f1 + service2: + model_id: gpt-3.5 + temperature: 1.0 + top_p: 0.0 + presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [ "foo", "bar", "baz" ] + function_choice_behavior: + type: required + functions: + - p2.f2 + service3: + model_id: gpt-3.5-turbo + temperature: 1.0 + top_p: 0.0 + 
presence_penalty: 0.0 + frequency_penalty: 0.0 + max_tokens: 256 + stop_sequences: [ "foo", "bar", "baz" ] + function_choice_behavior: + type: none + functions: + - p3.f3 + """; + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git a/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj index dafc4377b0e0..61199d9478b6 100644 --- a/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj +++ b/dotnet/src/Functions/Functions.Yaml/Functions.Yaml.csproj @@ -6,6 +6,15 @@ $(AssemblyName) net8.0;netstandard2.0 true + $(NoWarn);SKEXP0001 + + + + rc + + + + rc diff --git a/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs b/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs index ec2a26fc2b61..863d991bb207 100644 --- a/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs +++ b/dotnet/src/Functions/Functions.Yaml/KernelFunctionYaml.cs @@ -57,7 +57,7 @@ public static PromptTemplateConfig ToPromptTemplateConfig(string text) { var deserializer = new DeserializerBuilder() .WithNamingConvention(UnderscoredNamingConvention.Instance) - .WithNodeDeserializer(new PromptExecutionSettingsNodeDeserializer()) + .WithTypeConverter(new PromptExecutionSettingsTypeConverter()) .Build(); return deserializer.Deserialize(text); diff --git a/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs b/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs deleted file mode 100644 index 5bd7b839b068..000000000000 --- a/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsNodeDeserializer.cs +++ /dev/null @@ 
-1,43 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using YamlDotNet.Core; -using YamlDotNet.Serialization; - -namespace Microsoft.SemanticKernel; - -/// -/// Deserializer for . -/// -internal sealed class PromptExecutionSettingsNodeDeserializer : INodeDeserializer -{ - /// - public bool Deserialize(IParser reader, Type expectedType, Func nestedObjectDeserializer, out object? value) - { - if (expectedType != typeof(PromptExecutionSettings)) - { - value = null; - return false; - } - - var dictionary = nestedObjectDeserializer.Invoke(reader, typeof(Dictionary)); - var modelSettings = new PromptExecutionSettings(); - foreach (var kv in (Dictionary)dictionary!) - { - switch (kv.Key) - { - case "model_id": - modelSettings.ModelId = (string)kv.Value; - break; - - default: - (modelSettings.ExtensionData ??= new Dictionary()).Add(kv.Key, kv.Value); - break; - } - } - - value = modelSettings; - return true; - } -} diff --git a/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsTypeConverter.cs b/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsTypeConverter.cs new file mode 100644 index 000000000000..d5a0f155ba74 --- /dev/null +++ b/dotnet/src/Functions/Functions.Yaml/PromptExecutionSettingsTypeConverter.cs @@ -0,0 +1,101 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; +using YamlDotNet.Core; +using YamlDotNet.Core.Events; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.BufferedDeserialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace Microsoft.SemanticKernel; + +/// +/// Allows custom deserialization for from YAML prompts. +/// +internal sealed class PromptExecutionSettingsTypeConverter : IYamlTypeConverter +{ + /// + public bool Accepts(Type type) + { + return type == typeof(PromptExecutionSettings); + } + + /// + public object? 
ReadYaml(IParser parser, Type type) + { + s_deserializer ??= new DeserializerBuilder() + .WithNamingConvention(UnderscoredNamingConvention.Instance) + .IgnoreUnmatchedProperties() // Required to ignore the 'type' property used as type discrimination. Otherwise, the "Property 'type' not found on type '{type.FullName}'" exception is thrown. + .WithTypeDiscriminatingNodeDeserializer(CreateAndRegisterTypeDiscriminatingNodeDeserializer) + .Build(); + + parser.MoveNext(); // Move to the first property + + var executionSettings = new PromptExecutionSettings(); + while (parser.Current is not MappingEnd) + { + var propertyName = parser.Consume().Value; + switch (propertyName) + { + case "model_id": + executionSettings.ModelId = s_deserializer.Deserialize(parser); + break; + case "function_choice_behavior": + executionSettings.FunctionChoiceBehavior = s_deserializer.Deserialize(parser); + break; + default: + (executionSettings.ExtensionData ??= new Dictionary()).Add(propertyName, s_deserializer.Deserialize(parser)); + break; + } + } + parser.MoveNext(); // Move past the MappingEnd event + return executionSettings; + } + + /// + public void WriteYaml(IEmitter emitter, object? value, Type type) + { + throw new NotImplementedException(); + } + + /// + /// Creates and register a for polymorphic deserialization of . + /// + /// The to configure the . + private static void CreateAndRegisterTypeDiscriminatingNodeDeserializer(ITypeDiscriminatingNodeDeserializerOptions options) + { + var attributes = typeof(FunctionChoiceBehavior).GetCustomAttributes(false); + + // Getting the type discriminator property name - "type" from the JsonPolymorphicAttribute. 
+ var discriminatorKey = attributes.OfType().Single().TypeDiscriminatorPropertyName; + if (string.IsNullOrEmpty(discriminatorKey)) + { + throw new InvalidOperationException("Type discriminator property name is not specified."); + } + + var discriminatorTypeMapping = new Dictionary(); + + // Getting FunctionChoiceBehavior subtypes and their type discriminators registered for polymorphic deserialization. + var derivedTypeAttributes = attributes.OfType(); + foreach (var derivedTypeAttribute in derivedTypeAttributes) + { + var discriminator = derivedTypeAttribute.TypeDiscriminator?.ToString(); + if (string.IsNullOrEmpty(discriminator)) + { + throw new InvalidOperationException($"Type discriminator is not specified for the {derivedTypeAttribute.DerivedType} type."); + } + + discriminatorTypeMapping.Add(discriminator!, derivedTypeAttribute.DerivedType); + } + + options.AddKeyValueTypeDiscriminator(discriminatorKey!, discriminatorTypeMapping); + } + + /// + /// The YamlDotNet deserializer instance. + /// + private static IDeserializer? 
s_deserializer; +} diff --git a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs index 4fd99b717b5e..cf4c7867a0b8 100644 --- a/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/ChatCompletionAgentTests.cs @@ -2,27 +2,28 @@ using System; using System.ComponentModel; using System.Linq; +using System.Text; using System.Threading.Tasks; +using Azure.Identity; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; using Xunit; -using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Agents.OpenAI; +namespace SemanticKernel.IntegrationTests.Agents; #pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. -public sealed class ChatCompletionAgentTests(ITestOutputHelper output) : IDisposable +public sealed class ChatCompletionAgentTests() { private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() @@ -32,7 +33,7 @@ public sealed class ChatCompletionAgentTests(ITestOutputHelper output) : IDispos /// Integration test for using function calling /// and targeting Azure OpenAI services. 
/// - [Theory] + [RetryTheory(typeof(HttpOperationException))] [InlineData("What is the special soup?", "Clam Chowder", false)] [InlineData("What is the special soup?", "Clam Chowder", true)] public async Task AzureChatCompletionAgentAsync(string input, string expectedAnswerContains, bool useAutoFunctionTermination) @@ -42,12 +43,10 @@ public async Task AzureChatCompletionAgentAsync(string input, string expectedAns KernelPlugin plugin = KernelPluginFactory.CreateFromType(); - this._kernelBuilder.Services.AddSingleton(this._logger); - this._kernelBuilder.AddAzureOpenAIChatCompletion( - configuration.ChatDeploymentName!, - configuration.Endpoint, - configuration.ApiKey); + deploymentName: configuration.ChatDeploymentName!, + endpoint: configuration.Endpoint, + credentials: new AzureCliCredential()); if (useAutoFunctionTermination) { @@ -94,13 +93,160 @@ public async Task AzureChatCompletionAgentAsync(string input, string expectedAns Assert.Contains(expectedAnswerContains, messages.Single().Content, StringComparison.OrdinalIgnoreCase); } - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); + /// + /// Integration test for using new function calling model + /// and targeting Azure OpenAI services. 
+ /// + [RetryTheory(typeof(HttpOperationException))] + [InlineData("What is the special soup?", "Clam Chowder", false)] + [InlineData("What is the special soup?", "Clam Chowder", true)] + public async Task AzureChatCompletionAgentUsingNewFunctionCallingModelAsync(string input, string expectedAnswerContains, bool useAutoFunctionTermination) + { + // Arrange + AzureOpenAIConfiguration configuration = this._configuration.GetSection("AzureOpenAI").Get()!; + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + this._kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: configuration.ChatDeploymentName!, + endpoint: configuration.Endpoint, + credentials: new AzureCliCredential()); + + if (useAutoFunctionTermination) + { + this._kernelBuilder.Services.AddSingleton(new AutoInvocationFilter()); + } + + this._kernelBuilder.Plugins.Add(plugin); + + Kernel kernel = this._kernelBuilder.Build(); + + ChatCompletionAgent agent = + new() + { + Kernel = kernel, + Instructions = "Answer questions about the menu.", + Arguments = new(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + }; + + AgentGroupChat chat = new(); + chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + + // Act + ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync(); + ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); + + // Assert + Assert.Single(messages); + + ChatMessageContent response = messages.Single(); + + if (useAutoFunctionTermination) + { + Assert.Equal(3, history.Length); + Assert.Single(response.Items.OfType()); + Assert.Single(response.Items.OfType()); + } + else + { + Assert.Equal(4, history.Length); + Assert.Single(response.Items); + Assert.Single(response.Items.OfType()); + } + + Assert.Contains(expectedAnswerContains, messages.Single().Content, StringComparison.OrdinalIgnoreCase); + } + + /// + /// Integration test for using function calling + /// and 
targeting Azure OpenAI services. + /// + [RetryFact(typeof(HttpOperationException))] + public async Task AzureChatCompletionStreamingAsync() + { + // Arrange + AzureOpenAIConfiguration configuration = this._configuration.GetSection("AzureOpenAI").Get()!; + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + this._kernelBuilder.AddAzureOpenAIChatCompletion( + configuration.ChatDeploymentName!, + configuration.Endpoint, + new AzureCliCredential()); + + this._kernelBuilder.Plugins.Add(plugin); + + Kernel kernel = this._kernelBuilder.Build(); + + ChatCompletionAgent agent = + new() + { + Kernel = kernel, + Instructions = "Answer questions about the menu.", + Arguments = new(new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }), + }; + + AgentGroupChat chat = new(); + chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "What is the special soup?")); + + // Act + StringBuilder builder = new(); + await foreach (var message in chat.InvokeStreamingAsync(agent)) + { + builder.Append(message.Content); + } + + ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); + + // Assert + Assert.Contains("Clam Chowder", builder.ToString(), StringComparison.OrdinalIgnoreCase); + Assert.Contains("Clam Chowder", history.First().Content, StringComparison.OrdinalIgnoreCase); + } - public void Dispose() + /// + /// Integration test for using new function calling model + /// and targeting Azure OpenAI services. 
+ /// + [RetryFact(typeof(HttpOperationException))] + public async Task AzureChatCompletionStreamingUsingNewFunctionCallingModelAsync() { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); + // Arrange + AzureOpenAIConfiguration configuration = this._configuration.GetSection("AzureOpenAI").Get()!; + + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + this._kernelBuilder.AddAzureOpenAIChatCompletion( + configuration.ChatDeploymentName!, + configuration.Endpoint, + new AzureCliCredential()); + + this._kernelBuilder.Plugins.Add(plugin); + + Kernel kernel = this._kernelBuilder.Build(); + + ChatCompletionAgent agent = + new() + { + Kernel = kernel, + Instructions = "Answer questions about the menu.", + Arguments = new(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }), + }; + + AgentGroupChat chat = new(); + chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "What is the special soup?")); + + // Act + StringBuilder builder = new(); + await foreach (var message in chat.InvokeStreamingAsync(agent)) + { + builder.Append(message.Content); + } + + ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); + + // Assert + Assert.Contains("Clam Chowder", builder.ToString(), StringComparison.OrdinalIgnoreCase); + Assert.Contains("Clam Chowder", history.First().Content, StringComparison.OrdinalIgnoreCase); } public sealed class MenuPlugin diff --git a/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs new file mode 100644 index 000000000000..93a5f1a4f77a --- /dev/null +++ b/dotnet/src/IntegrationTests/Agents/MixedAgentTests.cs @@ -0,0 +1,176 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System; +using System.ClientModel; +using System.ComponentModel; +using System.Text; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Agents; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class MixedAgentTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// Integration test for using function calling + /// and targeting Open AI services. + /// + [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [InlineData(false)] + [InlineData(true)] + public async Task OpenAIMixedAgentTestAsync(bool useNewFunctionCallingModel) + { + OpenAIConfiguration openAISettings = this._configuration.GetSection("OpenAI").Get()!; + Assert.NotNull(openAISettings); + + // Arrange, Act & Assert + await this.VerifyAgentExecutionAsync( + this.CreateChatCompletionKernel(openAISettings), + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(openAISettings.ApiKey)), + openAISettings.ChatModelId!, + useNewFunctionCallingModel); + } + + /// + /// Integration test for using function calling + /// and targeting Azure OpenAI services. 
+ /// + [RetryTheory(typeof(HttpOperationException))] + [InlineData(false)] + [InlineData(true)] + public async Task AzureOpenAIMixedAgentAsync(bool useNewFunctionCallingModel) + { + AzureOpenAIConfiguration azureOpenAISettings = this._configuration.GetSection("AzureOpenAI").Get()!; + Assert.NotNull(azureOpenAISettings); + + // Arrange, Act & Assert + await this.VerifyAgentExecutionAsync( + this.CreateChatCompletionKernel(azureOpenAISettings), + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAISettings.Endpoint)), + azureOpenAISettings.ChatDeploymentName!, + useNewFunctionCallingModel); + } + + private async Task VerifyAgentExecutionAsync( + Kernel chatCompletionKernel, + OpenAIClientProvider config, + string modelName, + bool useNewFunctionCallingModel) + { + // Arrange + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + + var executionSettings = useNewFunctionCallingModel ? + new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() } : + new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Configure chat agent with the plugin. + ChatCompletionAgent chatAgent = + new() + { + Kernel = chatCompletionKernel, + Instructions = "Answer questions about the menu.", + Arguments = new(executionSettings), + }; + chatAgent.Kernel.Plugins.Add(plugin); + + // Assistant doesn't need plug-in since it has access to the shared function result. + OpenAIAssistantAgent assistantAgent = + await OpenAIAssistantAgent.CreateAsync( + config, + new(modelName) + { + Instructions = "Answer questions about the menu." 
+ }, + new Kernel()); + + // Act & Assert + try + { + AgentGroupChat chat = new(chatAgent, assistantAgent); + await this.AssertAgentInvocationAsync(chat, chatAgent, "What is the special soup?", "Clam Chowder"); + await this.AssertAgentInvocationAsync(chat, assistantAgent, "What is the special drink?", "Chai Tea"); + } + finally + { + await assistantAgent.DeleteAsync(); + } + } + + private async Task AssertAgentInvocationAsync(AgentGroupChat chat, Agent agent, string input, string expected) + { + chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + + // Act + StringBuilder builder = new(); + await foreach (var message in chat.InvokeAsync(agent)) + { + builder.Append(message.Content); + } + + // Assert + Assert.Contains(expected, builder.ToString(), StringComparison.OrdinalIgnoreCase); + } + + private Kernel CreateChatCompletionKernel(AzureOpenAIConfiguration configuration) + { + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: configuration.ChatDeploymentName!, + endpoint: configuration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private Kernel CreateChatCompletionKernel(OpenAIConfiguration configuration) + { + IKernelBuilder kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + configuration.ChatModelId!, + configuration.ApiKey); + + return kernelBuilder.Build(); + } + + public sealed class MenuPlugin + { + [KernelFunction, Description("Provides a list of specials from the menu.")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")] + public string GetSpecials() + { + return @" +Special Soup: Clam Chowder +Special Salad: Cobb Salad +Special Drink: Chai Tea +"; + } + + [KernelFunction, Description("Provides the price of the requested menu item.")] + public string GetItemPrice( + [Description("The name of the menu 
item.")] + string menuItem) + { + return "$9.99"; + } + } +} diff --git a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs index 20d6dcad9146..641c0bbd36f4 100644 --- a/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs +++ b/dotnet/src/IntegrationTests/Agents/OpenAIAssistantAgentTests.cs @@ -1,28 +1,28 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.ClientModel; using System.ComponentModel; +using System.Linq; using System.Text; using System.Threading.Tasks; +using Azure.Identity; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Agents; using Microsoft.SemanticKernel.Agents.OpenAI; using Microsoft.SemanticKernel.ChatCompletion; using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; using Xunit; -using Xunit.Abstractions; -namespace SemanticKernel.IntegrationTests.Agents.OpenAI; +namespace SemanticKernel.IntegrationTests.Agents; #pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
-public sealed class OpenAIAssistantAgentTests(ITestOutputHelper output) : IDisposable +public sealed class OpenAIAssistantAgentTests { - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() @@ -36,12 +36,12 @@ public sealed class OpenAIAssistantAgentTests(ITestOutputHelper output) : IDispo [InlineData("What is the special soup?", "Clam Chowder")] public async Task OpenAIAssistantAgentTestAsync(string input, string expectedAnswerContains) { - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); + OpenAIConfiguration openAISettings = this._configuration.GetSection("OpenAI").Get()!; + Assert.NotNull(openAISettings); await this.ExecuteAgentAsync( - new(openAIConfiguration.ApiKey), - openAIConfiguration.ModelId, + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(openAISettings.ApiKey)), + openAISettings.ChatModelId!, input, expectedAnswerContains); } @@ -50,7 +50,7 @@ await this.ExecuteAgentAsync( /// Integration test for using function calling /// and targeting Azure OpenAI services. 
/// - [Theory(Skip = "No supported endpoint configured.")] + [RetryTheory(typeof(HttpOperationException))] [InlineData("What is the special soup?", "Clam Chowder")] public async Task AzureOpenAIAssistantAgentAsync(string input, string expectedAnswerContains) { @@ -58,57 +58,125 @@ public async Task AzureOpenAIAssistantAgentAsync(string input, string expectedAn Assert.NotNull(azureOpenAIConfiguration); await this.ExecuteAgentAsync( - new(azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.Endpoint), + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), + azureOpenAIConfiguration.ChatDeploymentName!, + input, + expectedAnswerContains); + } + + /// + /// Integration test for using function calling + /// and targeting Open AI services. + /// + [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [InlineData("What is the special soup?", "Clam Chowder")] + public async Task OpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains) + { + OpenAIConfiguration openAISettings = this._configuration.GetSection("OpenAI").Get()!; + Assert.NotNull(openAISettings); + + await this.ExecuteStreamingAgentAsync( + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(openAISettings.ApiKey)), + openAISettings.ModelId, + input, + expectedAnswerContains); + } + + /// + /// Integration test for using function calling + /// and targeting Azure OpenAI services. 
+ /// + [RetryTheory(typeof(HttpOperationException))] + [InlineData("What is the special soup?", "Clam Chowder")] + public async Task AzureOpenAIAssistantAgentStreamingAsync(string input, string expectedAnswerContains) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + await this.ExecuteStreamingAgentAsync( + OpenAIClientProvider.ForAzureOpenAI(new AzureCliCredential(), new Uri(azureOpenAIConfiguration.Endpoint)), azureOpenAIConfiguration.ChatDeploymentName!, input, expectedAnswerContains); } private async Task ExecuteAgentAsync( - OpenAIAssistantConfiguration config, + OpenAIClientProvider config, string modelName, string input, string expected) { // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); + Kernel kernel = new(); - Kernel kernel = this._kernelBuilder.Build(); + KernelPlugin plugin = KernelPluginFactory.CreateFromType(); + kernel.Plugins.Add(plugin); + + OpenAIAssistantAgent agent = + await OpenAIAssistantAgent.CreateAsync( + config, + new(modelName) + { + Instructions = "Answer questions about the menu.", + }, + kernel); + + try + { + AgentGroupChat chat = new(); + chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); + + // Act + StringBuilder builder = new(); + await foreach (var message in chat.InvokeAsync(agent)) + { + builder.Append(message.Content); + } + + // Assert + Assert.Contains(expected, builder.ToString(), StringComparison.OrdinalIgnoreCase); + } + finally + { + await agent.DeleteAsync(); + } + } + + private async Task ExecuteStreamingAgentAsync( + OpenAIClientProvider config, + string modelName, + string input, + string expected) + { + // Arrange + Kernel kernel = new(); KernelPlugin plugin = KernelPluginFactory.CreateFromType(); kernel.Plugins.Add(plugin); OpenAIAssistantAgent agent = await OpenAIAssistantAgent.CreateAsync( - kernel, config, - new() + new(modelName) { Instructions = "Answer questions about the menu.", - 
ModelId = modelName, - }); + }, + kernel); AgentGroupChat chat = new(); chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input)); // Act StringBuilder builder = new(); - await foreach (var message in chat.InvokeAsync(agent)) + await foreach (var message in chat.InvokeStreamingAsync(agent)) { builder.Append(message.Content); } // Assert + ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync(); Assert.Contains(expected, builder.ToString(), StringComparison.OrdinalIgnoreCase); - } - - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); + Assert.Contains(expected, history.First().Content, StringComparison.OrdinalIgnoreCase); } public sealed class MenuPlugin diff --git a/dotnet/src/IntegrationTests/Connectors/AzureAIInference/AzureAIInferenceChatCompletionServiceTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureAIInference/AzureAIInferenceChatCompletionServiceTests.cs new file mode 100644 index 000000000000..140e16fc97cc --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureAIInference/AzureAIInferenceChatCompletionServiceTests.cs @@ -0,0 +1,255 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using Azure.AI.Inference; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureAIInference; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureAIInference; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class AzureAIInferenceChatCompletionServiceTests(ITestOutputHelper output) : BaseIntegrationTest, IDisposable +{ + private const string InputParameterName = "input"; + private readonly XunitLogger _loggerFactory = new(output); + private readonly RedirectOutput _testOutputHelper = new(output); + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Theory] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task InvokeGetChatMessageContentsAsync(string prompt, string expectedAnswerContains) + { + // Arrange + var config = this._configuration.GetSection("AzureAIInference").Get(); + Assert.NotNull(config); + + var sut = (config.ApiKey is not null) + ? 
new AzureAIInferenceChatCompletionService( + endpoint: config.Endpoint, + apiKey: config.ApiKey, + loggerFactory: this._loggerFactory) + : new AzureAIInferenceChatCompletionService( + modelId: null, + endpoint: config.Endpoint, + credential: new AzureCliCredential(), + loggerFactory: this._loggerFactory); + + ChatHistory chatHistory = [ + new ChatMessageContent(AuthorRole.User, prompt) + ]; + + // Act + var result = await sut.GetChatMessageContentsAsync(chatHistory); + + // Assert + Assert.Single(result); + Assert.Contains(expectedAnswerContains, result[0].Content, StringComparison.OrdinalIgnoreCase); + } + + [Theory] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task InvokeGetStreamingChatMessageContentsAsync(string prompt, string expectedAnswerContains) + { + // Arrange + var config = this._configuration.GetSection("AzureAIInference").Get(); + Assert.NotNull(config); + + var sut = (config.ApiKey is not null) + ? new AzureAIInferenceChatCompletionService( + endpoint: config.Endpoint, + apiKey: config.ApiKey, + loggerFactory: this._loggerFactory) + : new AzureAIInferenceChatCompletionService( + modelId: null, + endpoint: config.Endpoint, + credential: new AzureCliCredential(), + loggerFactory: this._loggerFactory); + + ChatHistory chatHistory = [ + new ChatMessageContent(AuthorRole.User, prompt) + ]; + + StringBuilder fullContent = new(); + + // Act + await foreach (var update in sut.GetStreamingChatMessageContentsAsync(chatHistory)) + { + fullContent.Append(update.Content); + } + + // Assert + Assert.Contains(expectedAnswerContains, fullContent.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ItCanUseChatForTextGenerationAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets after '{{$input}}', excluding moons, using bullet points.", + new 
AzureAIInferencePromptExecutionSettings()); + + // Act + var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" }); + + // Assert + Assert.NotNull(result); + Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ItStreamingFromKernelTestAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + StringBuilder fullResult = new(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + } + + // Assert + Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ItHttpRetryPolicyTestAsync() + { + // Arrange + List statusCodes = []; + + var config = this._configuration.GetSection("AzureAIInference").Get(); + Assert.NotNull(config); + Assert.NotNull(config.Endpoint); + + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddAzureAIInferenceChatCompletion(endpoint: config.Endpoint, apiKey: null); + + kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + o.Retry.OnRetry = args => + { + statusCodes.Add(args.Outcome.Result?.StatusCode); + return ValueTask.CompletedTask; + }; + }); + }); + + var target = kernelBuilder.Build(); + + var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + + var prompt = "Where is the most 
famous fish market in Seattle, Washington, USA?"; + + // Act + var exception = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); + + // Assert + Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s)); + Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode); + } + + [Fact] + public async Task ItShouldReturnInnerContentAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var result = await kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]); + var content = result.GetValue(); + // Assert + Assert.NotNull(content); + Assert.NotNull(content.InnerContent); + + Assert.IsType(content.InnerContent); + var completions = (ChatCompletions)content.InnerContent; + var usage = completions.Usage; + + // Usage + Assert.NotEqual(0, usage.PromptTokens); + Assert.NotEqual(0, usage.CompletionTokens); + } + + [Theory(Skip = "This test is for manual verification.")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + // Act + FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + private Kernel CreateAndInitializeKernel(HttpClient? 
httpClient = null) + { + var config = this._configuration.GetSection("AzureAIInference").Get(); + Assert.NotNull(config); + Assert.NotNull(config.ApiKey); + Assert.NotNull(config.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureAIInferenceChatCompletion( + endpoint: config.Endpoint, + apiKey: config.ApiKey, + serviceId: config.ServiceId, + httpClient: httpClient); + + return kernelBuilder.Build(); + } + + public void Dispose() + { + this._loggerFactory.Dispose(); + this._testOutputHelper.Dispose(); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs new file mode 100644 index 000000000000..13275f8b85bf --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIAudioToTextTests.cs @@ -0,0 +1,53 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.IO; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.AudioToText; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIAudioToTextTests() +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [RetryFact] + public async Task AzureOpenAIAudioToTextTestAsync() + { + // Arrange + const string Filename = "test_audio.wav"; + + AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIAudioToText").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAIAudioToText( + deploymentName: azureOpenAIConfiguration.DeploymentName, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()) + .Build(); + + var service = kernel.GetRequiredService(); + + await using Stream audio = File.OpenRead($"./TestData/{Filename}"); + var audioData = await BinaryData.FromStreamAsync(audio); + + // Act + var result = await service.GetTextContentAsync(new AudioContent(audioData, mimeType: "audio/wav"), new OpenAIAudioToTextExecutionSettings(Filename)); + + // Assert + Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs new file mode 100644 index 000000000000..31c8810ac686 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionFunctionCallingTests.cs @@ -0,0 +1,1009 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIChatCompletionFunctionCallingTests : BaseIntegrationTest +{ + [Fact] + public async Task CanAutoInvokeKernelFunctionsAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); + await next(context); + }); + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.FunctionInvocationFilters.Add(filter); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); + + // Assert + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsStreamingAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); + await next(context); + }); + + var kernel = 
this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.FunctionInvocationFilters.Add(filter); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("rain", stringBuilder.ToString(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod((WeatherParameters parameters) => + { + if (parameters.City.Name.Equals("Dublin", StringComparison.OrdinalIgnoreCase) && + (parameters.City.Country.Equals("Ireland", StringComparison.OrdinalIgnoreCase) || parameters.City.Country.Equals("IE", StringComparison.OrdinalIgnoreCase))) + { + return Task.FromResult(42.8); // 42.8 Fahrenheit. + } + + throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); + }, "Get_Current_Temperature", "Get current temperature."), + ]); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("42.8", result.GetValue(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, Ireland. 
+ } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("10", result.GetValue(), StringComparison.InvariantCulture); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionsWithEnumTypeParametersAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); + + // Assert + Assert.NotNull(result); + Assert.Contains("rain", result.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionFromPromptAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var kernel = this.CreateAndInitializeKernel(); + kernel.FunctionInvocationFilters.Add(filter); + + var promptFunction = KernelFunctionFactory.CreateFromPrompt( + "Hey LLM, give me one news title that's hot off the press!", + functionName: "FindLatestNews", + description: "Searches for the latest news."); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions( + "NewsProvider", + "Delivers up-to-date news content.", + [promptFunction])); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + 
+ // Act + var result = await kernel.InvokePromptAsync("Show me the latest news.", new(settings)); + + // Assert + Assert.Contains(invokedFunctions, functionName => functionName.Contains("InvokePromptAsync")); + Assert.Contains(invokedFunctions, functionName => functionName.Contains("FindLatestNews")); + } + + [Fact] + public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() + { + // Arrange + var invokedFunctions = new List(); + + var filter = new FakeFunctionFilter(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var kernel = this.CreateAndInitializeKernel(); + kernel.FunctionInvocationFilters.Add(filter); + + var promptFunction = KernelFunctionFactory.CreateFromPrompt( + "Hey LLM, give me one news title that's hot off the press!", + functionName: "FindLatestNews", + description: "Searches for the latest news."); + + kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions( + "NewsProvider", + "Delivers up-to-date news content.", + [promptFunction])); + + AzureOpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var streamingResult = kernel.InvokePromptStreamingAsync("Show me the latest news.", new(settings)); + await foreach (var update in streamingResult) + { + } + + // Assert + Assert.Contains(invokedFunctions, functionName => functionName.Contains("InvokePromptStreamingAsync")); + Assert.Contains(invokedFunctions, functionName => functionName.Contains("FindLatestNews")); + } + + [Fact] + public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { 
ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Current way of handling function calls manually using connector specific chat message content class. + var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + + while (toolCalls.Count > 0) + { + // Adding LLM function call request to chat history + chatHistory.Add(result); + + // Iterating over the requested function calls and invoking them + foreach (var toolCall in toolCalls) + { + string content = kernel.Plugins.TryGetFunctionAndArguments(toolCall, out KernelFunction? function, out KernelArguments? arguments) ? + JsonSerializer.Serialize((await function.InvokeAsync(kernel, arguments)).GetValue()) : + "Unable to find function. Please try again!"; + + // Adding the result of the function call to the chat history + chatHistory.Add(new ChatMessageContent( + AuthorRole.Tool, + content, + metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } })); + } + + // Sending the functions invocation results back to the LLM to get the final response + result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + } + + // Assert + Assert.Contains("rain", result.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = 
kernel.GetRequiredService(); + + // Act + var messageContent = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length != 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + var result = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(result.ToChatMessage()); + } + + // Sending the functions invocation results to the LLM to get the final response + messageContent = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.Contains("rain", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length != 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the 
requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + // Simulating an exception + var exception = new OperationCanceledException("The operation was canceled due to timeout."); + + chatHistory.Add(new FunctionResultContent(functionCall, exception).ToChatMessage()); + } + + // Sending the functions execution results back to the LLM to get the final response + messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.NotNull(messageContent.Content); + TestHelpers.AssertChatErrorExcuseMessage(messageContent.Content); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + var functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + + while (functionCalls.Length > 0) + { + // Adding function call from LLM to chat history + chatHistory.Add(messageContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + var result = await functionCall.InvokeAsync(kernel); + + chatHistory.AddMessage(AuthorRole.Tool, [result]); + } + + // Adding a simulated function call to the connector 
response message + var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123"); + messageContent.Items.Add(simulatedFunctionCall); + + // Adding a simulated function result to chat history + var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities."; + chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); + + // Sending the functions invocation results back to the LLM to get the final response + messageContent = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + functionCalls = FunctionCallContent.GetFunctionCalls(messageContent).ToArray(); + } + + // Assert + Assert.Contains("tornado", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ItFailsIfNoFunctionResultProvidedAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var completionService = kernel.GetRequiredService(); + + // Act + var result = await completionService.GetChatMessageContentAsync(chatHistory, settings, kernel); + + chatHistory.Add(result); + + var exception = await Assert.ThrowsAsync(() => completionService.GetChatMessageContentAsync(chatHistory, settings, kernel)); + + // Assert + Assert.Contains("'tool_calls' must be followed by tool", exception.Message, StringComparison.InvariantCulture); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingAsync() + { + // Arrange + var kernel 
= this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + var userMessage = chatHistory[0]; + Assert.Equal(AuthorRole.User, userMessage.Role); + + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. 
+ getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } + + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); + Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); + + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); + Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. + + var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); + Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); + Assert.NotNull(getCurrentTimeFunctionCallResult.Result); + + Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); + Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. 
+ + var getWeatherForCityFunctionCallResult = getWeatherForCityFunctionCallResultMessage.Items.OfType().Single(); + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallResult.PluginName); + Assert.Equal(getWeatherForCityFunctionCallRequest.Id, getWeatherForCityFunctionCallResult.CallId); + Assert.NotNull(getWeatherForCityFunctionCallResult.Result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + var functionResult = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(functionResult.ToChatMessage()); + } + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + Assert.Contains("rain", result, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var result = new StringBuilder(); + + // Act + await foreach (var contentUpdate in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + result.Append(contentUpdate.Content); + } + + // Assert + var userMessage = chatHistory[0]; + Assert.Equal(AuthorRole.User, userMessage.Role); + + // LLM requested the functions to call. 
+ var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. 
+ getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } + + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); + Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); + + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); + Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. + + var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); + Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); + Assert.NotNull(getCurrentTimeFunctionCallResult.Result); + + Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); + Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. 
+ + var getWeatherForCityFunctionCallResult = getWeatherForCityFunctionCallResultMessage.Items.OfType().Single(); + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallResult.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallResult.PluginName); + Assert.Equal(getWeatherForCityFunctionCallRequest.Id, getWeatherForCityFunctionCallResult.CallId); + Assert.NotNull(getWeatherForCityFunctionCallResult.Result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + // Simulating an exception + var exception = new OperationCanceledException("The operation was canceled due to timeout."); + + chatHistory.Add(new FunctionResultContent(functionCall, exception).ToChatMessage()); + } + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + TestHelpers.AssertChatErrorExcuseMessage(result); + } + + [Fact] + public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsForStreamingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + string? result = null; + + // Act + while (true) + { + AuthorRole? authorRole = null; + var fccBuilder = new FunctionCallContentBuilder(); + var textContent = new StringBuilder(); + + await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel)) + { + textContent.Append(streamingContent.Content); + authorRole ??= streamingContent.Role; + fccBuilder.Append(streamingContent); + } + + var functionCalls = fccBuilder.Build(); + if (functionCalls.Any()) + { + var fcContent = new ChatMessageContent(role: authorRole ?? 
default, content: null); + chatHistory.Add(fcContent); + + // Iterating over the requested function calls and invoking them + foreach (var functionCall in functionCalls) + { + fcContent.Items.Add(functionCall); + + var functionResult = await functionCall.InvokeAsync(kernel); + + chatHistory.Add(functionResult.ToChatMessage()); + } + + // Adding a simulated function call to the connector response message + var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123"); + fcContent.Items.Add(simulatedFunctionCall); + + // Adding a simulated function result to chat history + var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities."; + chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage()); + + continue; + } + + result = textContent.ToString(); + break; + } + + // Assert + Assert.Contains("tornado", result, StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task ItShouldSupportOldFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove connector-agnostic function-calling items to check if the old function-calling model, which relies on function information in metadata, is handled correctly. + foreach (var chatMessage in chatHistory!) + { + var index = 0; + while (index < chatMessage.Items.Count) + { + var item = chatMessage.Items[index]; + if (item is FunctionCallContent || item is FunctionResultContent) + { + chatMessage.Items.Remove(item); + continue; + } + index++; + } + } + + string? 
emailBody = null, emailRecipient = null; + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [ + KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail"), + KernelFunctionFactory.CreateFromMethod(() => "abc@domain.com", "GetMyEmail") + ]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email."); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61", emailBody); + } + + [Fact] + public async Task ItShouldSupportNewFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove metadata related to the old function-calling model to check if the new model, which relies on function call content/result classes, is handled correctly. + foreach (var chatMessage in chatHistory!) + { + if (chatMessage.Metadata is not null) + { + var metadata = new Dictionary(chatMessage.Metadata); + metadata.Remove(OpenAIChatMessageContent.ToolIdProperty); + metadata.Remove("ChatResponseMessage.FunctionToolCalls"); + chatMessage.Metadata = metadata; + } + } + + string? 
emailBody = null, emailRecipient = null; + + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [ + KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail"), + KernelFunctionFactory.CreateFromMethod(() => "abc@domain.com", "GetMyEmail") + ]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email."); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61", emailBody); + } + + /// + /// This test verifies that the connector can handle the scenario where the assistant response message is added to the chat history. 
+ /// The assistant response message with no function calls added to chat history caused the error: HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' + /// + [Fact] + public async Task AssistantResponseAddedToChatHistoryShouldBeHandledCorrectlyAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); + + var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + var sut = kernel.GetRequiredService(); + + // Act + var assistanceResponse = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + chatHistory.Add(assistanceResponse); // Adding assistance response to chat history. + chatHistory.AddUserMessage("Return only the color name."); + + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + } + + [Fact] + public async Task SubsetOfFunctionsCanBeUsedForFunctionCallingAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var function = kernel.CreateFunctionFromMethod(() => DayOfWeek.Friday.ToString(), "GetDayOfWeek", "Retrieves the current day of the week."); + kernel.ImportPluginFromFunctions("HelperFunctions", [function]); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What day is today?"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableFunctions([function.Metadata.ToOpenAIFunction()], true) }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.NotNull(result); + Assert.Contains("Friday", result.Content, StringComparison.InvariantCulture); + } + + [Fact] + public async Task RequiredFunctionShouldBeCalledAsync() + { + // 
Arrange + var kernel = this.CreateAndInitializeKernel(); + + var function = kernel.CreateFunctionFromMethod(() => DayOfWeek.Friday.ToString(), "GetDayOfWeek", "Retrieves the current day of the week."); + kernel.ImportPluginFromFunctions("HelperFunctions", [function]); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What day is today?"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(function.Metadata.ToOpenAIFunction(), true) }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.NotNull(result); + Assert.Contains("Friday", result.Content, StringComparison.InvariantCulture); + } + + private Kernel CreateAndInitializeKernel(bool importHelperPlugin = false) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + var kernel = kernelBuilder.Build(); + + if (importHelperPlugin) + { + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), + kernel.CreateFunctionFromMethod((string cityName) => + { + return cityName switch + { + "Boston" => "61 and rainy", + _ => "31 and snowing", + }; + }, "Get_Weather_For_City", "Gets the current weather for the specified city"), + ]); + } + + return kernel; + } + + public record WeatherParameters(City City); + + public class 
City + { + public string Name { get; set; } = string.Empty; + public string Country { get; set; } = string.Empty; + } + + private sealed class FakeFunctionFilter : IFunctionInvocationFilter + { + private readonly Func, Task>? _onFunctionInvocation; + + public FakeFunctionFilter( + Func, Task>? onFunctionInvocation = null) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next) => + this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionNonStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionNonStreamingTests.cs new file mode 100644 index 000000000000..3a84b0bb931e --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionNonStreamingTests.cs @@ -0,0 +1,172 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. 
Disable warning for this file only. + +public sealed class AzureOpenAIChatCompletionNonStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await chatCompletion.GetChatMessageContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + // Act + var result = await chatCompletion.GetChatMessageContentAsync(chatHistory, null, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await textGeneration.GetTextContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var result = await textGeneration.GetTextContentAsync("Reply \"I don't know\" to every question. What is the capital of France?", null, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? 
createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff 
--git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs new file mode 100644 index 000000000000..e59b33a05f82 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionStreamingTests.cs @@ -0,0 +1,173 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class AzureOpenAIChatCompletionStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update.Content); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) + { + stringBuilder.Append(update.Content); + + foreach (var key in update.Metadata!.Keys) + { + metadata[key] = update.Metadata[key]; + } + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("CompletionId", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", null, kernel)) + { + stringBuilder.Append(update); + + foreach (var key in update.Metadata!.Keys) + { + metadata[key] = update.Metadata[key]; + } + } + + // Assert + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("CompletionId", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? 
finishReason)); + Assert.Equal("Stop", finishReason); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs new file mode 100644 index 000000000000..0bba93b02ccc --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletionTests.cs @@ -0,0 +1,273 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class AzureOpenAIChatCompletionTests : BaseIntegrationTest +{ + [Fact] + //[Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] + public async Task ItCanUseAzureOpenAiChatForTextGenerationAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets after '{{$input}}', excluding moons, using bullet points.", + new AzureOpenAIPromptExecutionSettings()); + + // Act + var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" }); + + // Assert + Assert.NotNull(result); + Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task AzureOpenAIStreamingTestAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + StringBuilder fullResult = new(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + await foreach (var content in 
kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + } + + // Assert + Assert.Contains("Pike Place", fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task AzureOpenAIHttpRetryPolicyTestAsync() + { + // Arrange + List statusCodes = []; + + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration!.ChatDeploymentName!, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + apiKey: "INVALID_KEY"); + + kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + o.Retry.OnRetry = args => + { + statusCodes.Add(args.Outcome.Result?.StatusCode); + return ValueTask.CompletedTask; + }; + }); + }); + + var target = kernelBuilder.Build(); + + var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + var exception = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); + + // Assert + Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s)); + Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode); + } + + [Fact] + public async Task AzureOpenAIShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var result = await 
kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]); + + // Assert + Assert.NotNull(result.Metadata); + + // Usage + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + } + + [Theory(Skip = "This test is for manual verification.")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + // Act + FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ChatSystemPromptIsNotIgnoredAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var settings = new AzureOpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); + + // Assert + Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task SemanticKernelVersionHeaderIsSentAsync() + { + // Arrange + using var defaultHandler = new HttpClientHandler(); + using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); + using var httpClient = new HttpClient(httpHeaderHandler); + + var kernel = this.CreateAndInitializeKernel(httpClient); + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); + + // Assert + Assert.NotNull(httpHeaderHandler.RequestHeaders); + Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); + } + + //[Theory(Skip = "This test is for manual verification.")] + [Theory] + [InlineData(null, null)] + [InlineData(false, null)] + [InlineData(true, 2)] + [InlineData(true, 5)] + public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) + { + // Arrange + var settings = new AzureOpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; + + var kernel = this.CreateAndInitializeKernel(); + + // Act + var result = await kernel.InvokePromptAsync("Hi, can you help me today?", new(settings)); + + var logProbabilityInfo = result.Metadata?["ContentTokenLogProbabilities"] as IReadOnlyList; + + // Assert + Assert.NotNull(logProbabilityInfo); + + if (logprobs is true) + { + Assert.NotNull(logProbabilityInfo); + Assert.Equal(topLogprobs, logProbabilityInfo[0].TopLogProbabilities.Count); + } + else + { + Assert.Empty(logProbabilityInfo); + } + } + + #region internals + + private Kernel CreateAndInitializeKernel(HttpClient? 
httpClient = null) + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + Assert.NotNull(azureOpenAIConfiguration.ServiceId); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential(), + serviceId: azureOpenAIConfiguration.ServiceId, + httpClient: httpClient); + + return kernelBuilder.Build(); + } + + private const string InputParameterName = "input"; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) + { + public System.Net.Http.Headers.HttpRequestHeaders? 
RequestHeaders { get; private set; } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this.RequestHeaders = request.Headers; + return await base.SendAsync(request, cancellationToken); + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..6e723aceaccc --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs @@ -0,0 +1,376 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIAutoFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + private readonly IChatCompletionService _chatCompletionService; + + public AzureOpenAIAutoFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + this._chatCompletionService = this._kernel.GetRequiredService(); + } + + [Fact] + public async Task 
SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: auto + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true) 
}; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + string result = ""; + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + result += content; + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: auto + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + StringBuilder result = new(); + + // Act + await foreach (string update in promptFunction.InvokeStreamingAsync(this._kernel)) + { + result.Append(update); + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How 
many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto([plugin.First()], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); 
// Creating plugin without importing it to the kernel. + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto([plugin.First()], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", 
optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// A plugin that returns the current time. + /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + #region private + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? _onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..c6285185b8c2 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs @@ -0,0 +1,223 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Text; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAINoneFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + + public AzureOpenAINoneFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorNotToInvokeKernelFunctionAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); + this._kernel.Plugins.Add(plugin); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + // Act + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + var result = await this._kernel.InvokePromptAsync("How many days until Christmas?", new(settings)); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorNotToInvokeKernelFunctionAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await 
next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: none + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorNotToInvokeKernelFunctionForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); + this._kernel.Plugins.Add(plugin); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + StringBuilder result = new(); + + // Act + await foreach (string update in this._kernel.InvokePromptStreamingAsync("How many days until Christmas?", new(settings))) + { + result.Append(update); + } + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorNotToInvokeKernelFunctionForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: none + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + StringBuilder result = new(); + + // Act + await foreach (string update in promptFunction.InvokeStreamingAsync(this._kernel)) + { + result.Append(update); + } + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// A plugin that returns the current time. + /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + #region private + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? 
_onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..b155b9204e47 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs @@ -0,0 +1,462 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAIRequiredFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + private readonly IChatCompletionService _chatCompletionService; + + public AzureOpenAIRequiredFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + 
this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + this._chatCompletionService = this._kernel.GetRequiredService(); + } + + //[Fact] + // This test should be uncommented when the solution to dynamically control list of functions to advertise to the model is implemented. + //public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + //{ + // // Arrange + // this._kernel.ImportPluginFromType(); + + // var invokedFunctions = new List(); + + // IReadOnlyList? SelectFunctions(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // // Get all function names that have been invoked + // var invokedFunctionNames = context.ChatHistory + // .SelectMany(m => m.Items.OfType()) + // .Select(i => i.FunctionName); + + // invokedFunctions.AddRange(invokedFunctionNames); + + // if (invokedFunctionNames.Contains("GetCurrentDate")) + // { + // return []; // Don't advertise any more functions because the expected function has been invoked. 
+ // } + + // return context.Functions; + // } + + // var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: SelectFunctions) }; + + // var chatHistory = new ChatHistory(); + // chatHistory.AddUserMessage("How many days until Christmas?"); + + // // Act + // var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // // Assert + // Assert.NotNull(result); + + // Assert.Single(invokedFunctions); + // Assert.Contains("GetCurrentDate", invokedFunctions); + //} + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: required + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + //[Fact] + //This test should be uncommented when the solution to dynamically control list of functions to advertise to the model is implemented. + //public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + //{ + // // Arrange + // this._kernel.ImportPluginFromType(); + + // var invokedFunctions = new List(); + + // IReadOnlyList? 
SelectFunctions(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // // Get all function names that have been invoked + // var invokedFunctionNames = context.ChatHistory + // .SelectMany(m => m.Items.OfType()) + // .Select(i => i.FunctionName); + + // invokedFunctions.AddRange(invokedFunctionNames); + + // if (invokedFunctionNames.Contains("GetCurrentDate")) + // { + // return []; // Don't advertise any more functions because the expected function has been invoked. + // } + + // return context.Functions; + // } + + // var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: SelectFunctions) }; + + // var chatHistory = new ChatHistory(); + // chatHistory.AddUserMessage("How many days until Christmas?"); + + // string result = ""; + + // // Act + // await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + // { + // result += content; + // } + + // // Assert + // Assert.NotNull(result); + + // Assert.Single(invokedFunctions); + // Assert.Contains("GetCurrentDate", invokedFunctions); + //} + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + string result = ""; + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + 
result += content; + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: required + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + string result = ""; + + // Act + await foreach (string c in promptFunction.InvokeStreamingAsync(this._kernel)) + { + result += c; + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && 
!string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new AzureOpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required([plugin.First()], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. 
+ + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new AzureOpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required([plugin.First()], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); + Assert.NotNull(azureOpenAIConfiguration); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); + Assert.NotNull(azureOpenAIConfiguration.Endpoint); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + 
.AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// A plugin that returns the current time. + /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + #region private + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? _onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs new file mode 100644 index 000000000000..3e459e6917c5 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextEmbeddingTests.cs @@ -0,0 +1,72 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; +using Microsoft.SemanticKernel.Embeddings; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextEmbeddingTests +{ + public AzureOpenAITextEmbeddingTests() + { + var config = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); + Assert.NotNull(config); + this._azureOpenAIConfiguration = config; + } + + [Theory] + [InlineData("test sentence")] + public async Task AzureOpenAITestAsync(string testInputString) + { + // Arrange + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( + deploymentName: this._azureOpenAIConfiguration.DeploymentName, + endpoint: this._azureOpenAIConfiguration.Endpoint, + credential: new AzureCliCredential()); + + // Act + var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString); + var batchResult = await embeddingGenerator.GenerateEmbeddingsAsync([testInputString]); + + // Assert + Assert.Equal(AdaVectorLength, singleResult.Length); + Assert.Single(batchResult); + } + + [Theory] + [InlineData(null, 3072)] + [InlineData(1024, 1024)] + public async Task AzureOpenAIWithDimensionsAsync(int? 
dimensions, int expectedVectorLength) + { + // Arrange + const string TestInputString = "test sentence"; + + var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( + deploymentName: "text-embedding-3-large", + endpoint: this._azureOpenAIConfiguration.Endpoint, + credential: new AzureCliCredential(), + dimensions: dimensions); + + // Act + var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString); + + // Assert + Assert.Equal(expectedVectorLength, result.Length); + } + + private readonly AzureOpenAIConfiguration _azureOpenAIConfiguration; + + private const int AdaVectorLength = 1536; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs new file mode 100644 index 000000000000..881daf5fe3ba --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToAudioTests.cs @@ -0,0 +1,45 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.TextToAudio; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextToAudioTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Fact] + public async Task AzureOpenAITextToAudioTestAsync() + { + // Arrange + AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToAudio").Get(); + Assert.NotNull(azureOpenAIConfiguration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextToAudio( + deploymentName: azureOpenAIConfiguration.DeploymentName, + endpoint: azureOpenAIConfiguration.Endpoint, + credential: new AzureCliCredential()) + .Build(); + + var service = kernel.GetRequiredService(); + + // Act + var result = await service.GetAudioContentAsync("The sun rises in the east and sets in the west."); + + // Assert + var audioData = result.Data!.Value; + Assert.False(audioData.IsEmpty); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs new file mode 100644 index 000000000000..ac3f6d020c55 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/AzureOpenAI/AzureOpenAITextToImageTests.cs @@ -0,0 +1,74 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Threading.Tasks; +using Azure.Identity; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextToImage; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +#pragma warning disable CS0618 // Type or member is obsolete + +namespace SemanticKernel.IntegrationTests.Connectors.AzureOpenAI; + +public sealed class AzureOpenAITextToImageTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Fact] + public async Task ItCanReturnImageUrlAsync() + { + // Arrange + AzureOpenAIConfiguration? configuration = this._configuration.GetSection("AzureOpenAITextToImage").Get(); + Assert.NotNull(configuration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextToImage( + deploymentName: configuration.DeploymentName, + endpoint: configuration.Endpoint, + credentials: new AzureCliCredential()) + .Build(); + + var service = kernel.GetRequiredService(); + + // Act + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); + + // Assert + Assert.NotNull(result); + Assert.StartsWith("https://", result); + } + + [Fact] + public async Task GetImageContentsCanReturnImageUrlAsync() + { + // Arrange + AzureOpenAIConfiguration? 
configuration = this._configuration.GetSection("AzureOpenAITextToImage").Get(); + Assert.NotNull(configuration); + + var kernel = Kernel.CreateBuilder() + .AddAzureOpenAITextToImage( + deploymentName: configuration.DeploymentName, + endpoint: configuration.Endpoint, + credentials: new AzureCliCredential()) + .Build(); + + var service = kernel.GetRequiredService(); + + // Act + var result = await service.GetImageContentsAsync("The sun rises in the east and sets in the west.", new OpenAITextToImageExecutionSettings { Size = (1024, 1024) }); + + // Assert + Assert.NotNull(result); + Assert.NotEmpty(result); + Assert.NotEmpty(result[0].Uri!.ToString()); + Assert.StartsWith("https://", result[0].Uri!.ToString()); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs index 19158ce56e4f..4f328612abaf 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreFixture.cs @@ -58,9 +58,9 @@ public AzureAISearchVectorStoreFixture() new VectorStoreRecordDataProperty("Description", typeof(string)), new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4 }, new VectorStoreRecordDataProperty("Tags", typeof(string[])) { IsFilterable = true }, - new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool?)) { IsFilterable = true }, + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool?)) { IsFilterable = true, StoragePropertyName = "parking_is_included" }, new VectorStoreRecordDataProperty("LastRenovationDate", typeof(DateTimeOffset?)) { IsFilterable = true }, - new VectorStoreRecordDataProperty("Rating", typeof(float?)) + new VectorStoreRecordDataProperty("Rating", typeof(double?)) } }; } diff --git 
a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs index 7f810dc87fbd..447f75e90a27 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureAISearch/AzureAISearchVectorStoreRecordCollectionTests.cs @@ -308,6 +308,59 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); } + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var options = new AzureAISearchVectorStoreRecordCollectionOptions> + { + VectorStoreRecordDefinition = fixture.VectorStoreRecordDefinition + }; + var sut = new AzureAISearchVectorStoreRecordCollection>(fixture.SearchIndexClient, fixture.TestIndexName, options); + + // Act + var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "Description", "This is a generic mapper hotel" }, + { "Tags", new string[] { "generic" } }, + { "ParkingIncluded", false }, + { "LastRenovationDate", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, + { "Rating", 3.6d } + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } + } + }); + var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(baseSetGetResult); + Assert.Equal("Hotel 1", baseSetGetResult.Data["HotelName"]); + Assert.Equal("This is a great hotel", baseSetGetResult.Data["Description"]); + 
Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["Tags"]); + Assert.False((bool?)baseSetGetResult.Data["ParkingIncluded"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult.Data["LastRenovationDate"]); + Assert.Equal(3.6d, baseSetGetResult.Data["Rating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + + Assert.NotNull(upsertResult); + Assert.Equal("GenericMapper-1", upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new[] { "generic" }, localGetResult.Data["Tags"]); + Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["LastRenovationDate"]); + Assert.Equal(3.6d, localGetResult.Data["Rating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + private static Hotel CreateTestHotel(string hotelId) => new() { HotelId = hotelId, diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs index 1b1255c46b68..6854e7e7fdf8 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBMemoryStoreTestsFixture.cs @@ -54,7 +54,7 @@ public async Task DisposeAsync() private static string GetSetting(IConfigurationRoot configuration, string settingName) { - var settingValue = configuration[$"AzureCosmosDB:{settingName}"]; + var settingValue = 
configuration[$"AzureCosmosDBMongoDB:{settingName}"]; if (string.IsNullOrWhiteSpace(settingValue)) { throw new ArgumentNullException($"{settingValue} string is not configured"); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionFixture.cs new file mode 100644 index 000000000000..43f30eb5d520 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreCollectionFixture.cs @@ -0,0 +1,9 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; + +[CollectionDefinition("AzureCosmosDBMongoDBVectorStoreCollection")] +public class AzureCosmosDBMongoDBVectorStoreCollectionFixture : ICollectionFixture +{ } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs new file mode 100644 index 000000000000..6a54d6983a6d --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreFixture.cs @@ -0,0 +1,134 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Data; +using MongoDB.Driver; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; + +public class AzureCosmosDBMongoDBVectorStoreFixture : IAsyncLifetime +{ + private readonly List _testCollections = ["sk-test-hotels", "sk-test-contacts", "sk-test-addresses"]; + + /// Main test collection for tests. 
+ public string TestCollection => this._testCollections[0]; + + /// that can be used to manage the collections in Azure CosmosDB MongoDB. + public IMongoDatabase MongoDatabase { get; } + + /// Gets the manually created vector store record definition for Azure CosmosDB MongoDB test model. + public VectorStoreRecordDefinition HotelVectorStoreRecordDefinition { get; private set; } + + /// + /// Initializes a new instance of the class. + /// + public AzureCosmosDBMongoDBVectorStoreFixture() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile( + path: "testsettings.development.json", + optional: false, + reloadOnChange: true + ) + .AddEnvironmentVariables() + .Build(); + + var connectionString = GetConnectionString(configuration); + var client = new MongoClient(connectionString); + + this.MongoDatabase = client.GetDatabase("test"); + + this.HotelVectorStoreRecordDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("HotelCode", typeof(int)), + new VectorStoreRecordDataProperty("ParkingIncluded", typeof(bool)) { StoragePropertyName = "parking_is_included" }, + new VectorStoreRecordDataProperty("HotelRating", typeof(float)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordDataProperty("Timestamp", typeof(DateTime)), + new VectorStoreRecordDataProperty("Description", typeof(string)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.IvfFlat, DistanceFunction = DistanceFunction.CosineDistance } + ] + }; + } + + public async Task InitializeAsync() + { + foreach (var collection in this._testCollections) + { + await this.MongoDatabase.CreateCollectionAsync(collection); + } + } + + public async Task DisposeAsync() + { + 
foreach (var collection in this._testCollections) + { + await this.MongoDatabase.DropCollectionAsync(collection); + } + } + +#pragma warning disable CS8618 + public record AzureCosmosDBMongoDBHotel() + { + /// The key of the record. + [VectorStoreRecordKey] + public string HotelId { get; init; } + + /// A string metadata field. + [VectorStoreRecordData] + public string? HotelName { get; set; } + + /// An int metadata field. + [VectorStoreRecordData] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. + [VectorStoreRecordData(StoragePropertyName = "parking_is_included")] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. + [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData] + public string Description { get; set; } + + /// A datetime metadata field. + [VectorStoreRecordData] + public DateTime Timestamp { get; set; } + + /// A vector field. + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.IvfFlat, DistanceFunction: DistanceFunction.CosineDistance)] + public ReadOnlyMemory? 
DescriptionEmbedding { get; set; } + } +#pragma warning restore CS8618 + + #region private + + private static string GetConnectionString(IConfigurationRoot configuration) + { + var settingValue = configuration["AzureCosmosDBMongoDB:ConnectionString"]; + if (string.IsNullOrWhiteSpace(settingValue)) + { + throw new ArgumentNullException($"{settingValue} string is not configured"); + } + + return settingValue; + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..1296e2983c01 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreRecordCollectionTests.cs @@ -0,0 +1,440 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Microsoft.SemanticKernel.Data; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using Xunit; +using static SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB.AzureCosmosDBMongoDBVectorStoreFixture; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; + +[Collection("AzureCosmosDBMongoDBVectorStoreCollection")] +public class AzureCosmosDBMongoDBVectorStoreRecordCollectionTests(AzureCosmosDBMongoDBVectorStoreFixture fixture) +{ + private const string? 
SkipReason = "Azure CosmosDB MongoDB cluster is required"; + + [Theory(Skip = SkipReason)] + [InlineData("sk-test-hotels", true)] + [InlineData("nonexistentcollection", false)] + public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) + { + // Arrange + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + + // Act + var actual = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(expectedExists, actual); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanCreateCollectionAsync() + { + // Arrange + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + // Act + await sut.CreateCollectionAsync(); + + // Assert + Assert.True(await sut.CollectionExistsAsync()); + } + + [Theory(Skip = SkipReason)] + [InlineData(true, true)] + [InlineData(true, false)] + [InlineData(false, true)] + [InlineData(false, false)] + public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bool useRecordDefinition) + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + + var collectionNamePostfix = useRecordDefinition ? "with-definition" : "with-type"; + var collectionName = $"collection-{collectionNamePostfix}"; + + var options = new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions + { + VectorStoreRecordDefinition = useRecordDefinition ? 
fixture.HotelVectorStoreRecordDefinition : null + }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, collectionName); + + var record = this.CreateTestHotel(HotelId); + + // Act + await sut.CreateCollectionAsync(); + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId, new() { IncludeVectors = includeVectors }); + + // Assert + Assert.True(await sut.CollectionExistsAsync()); + await sut.DeleteCollectionAsync(); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + Assert.Equal(record.HotelId, getResult.HotelId); + Assert.Equal(record.HotelName, getResult.HotelName); + Assert.Equal(record.HotelCode, getResult.HotelCode); + Assert.Equal(record.HotelRating, getResult.HotelRating); + Assert.Equal(record.ParkingIncluded, getResult.ParkingIncluded); + Assert.Equal(record.Tags.ToArray(), getResult.Tags.ToArray()); + Assert.Equal(record.Description, getResult.Description); + Assert.Equal(record.Timestamp.ToUniversalTime(), getResult.Timestamp.ToUniversalTime()); + + if (includeVectors) + { + Assert.NotNull(getResult.DescriptionEmbedding); + Assert.Equal(record.DescriptionEmbedding!.Value.ToArray(), getResult.DescriptionEmbedding.Value.ToArray()); + } + else + { + Assert.Null(getResult.DescriptionEmbedding); + } + } + + [Fact(Skip = SkipReason)] + public async Task ItCanDeleteCollectionAsync() + { + // Arrange + const string TempCollectionName = "temp-test"; + await fixture.MongoDatabase.CreateCollectionAsync(TempCollectionName); + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, TempCollectionName); + + Assert.True(await sut.CollectionExistsAsync()); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + Assert.False(await sut.CollectionExistsAsync()); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAndDeleteRecordAsync() + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + var sut 
= new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + var record = this.CreateTestHotel(HotelId); + + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + await sut.DeleteAsync(HotelId); + + getResult = await sut.GetAsync(HotelId); + + // Assert + Assert.Null(getResult); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAndDeleteBatchAsync() + { + // Arrange + const string HotelId1 = "11111111-1111-1111-1111-111111111111"; + const string HotelId2 = "22222222-2222-2222-2222-222222222222"; + const string HotelId3 = "33333333-3333-3333-3333-333333333333"; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + var record1 = this.CreateTestHotel(HotelId1); + var record2 = this.CreateTestHotel(HotelId2); + var record3 = this.CreateTestHotel(HotelId3); + + var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + + Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); + + Assert.NotNull(getResults.First(l => l.HotelId == HotelId1)); + Assert.NotNull(getResults.First(l => l.HotelId == HotelId2)); + Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); + + // Act + await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + + getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + + // Assert + Assert.Empty(getResults); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertRecordAsync() + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + var record = this.CreateTestHotel(HotelId); + + var upsertResult 
= await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + record.HotelName = "Updated name"; + record.HotelRating = 10; + + upsertResult = await sut.UpsertAsync(record); + getResult = await sut.GetAsync(HotelId); + + // Assert + Assert.NotNull(getResult); + Assert.Equal("Updated name", getResult.HotelName); + Assert.Equal(10, getResult.HotelRating); + } + + [Fact(Skip = SkipReason)] + public async Task UpsertWithModelWorksCorrectlyAsync() + { + // Arrange + var definition = new VectorStoreRecordDefinition + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)) + } + }; + + var model = new TestModel { Id = "key", HotelName = "Test Name" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + fixture.MongoDatabase, + fixture.TestCollection, + new() { VectorStoreRecordDefinition = definition }); + + // Act + var upsertResult = await sut.UpsertAsync(model); + var getResult = await sut.GetAsync(model.Id); + + // Assert + Assert.Equal("key", upsertResult); + + Assert.NotNull(getResult); + Assert.Equal("key", getResult.Id); + Assert.Equal("Test Name", getResult.HotelName); + } + + [Fact(Skip = SkipReason)] + public async Task UpsertWithVectorStoreModelWorksCorrectlyAsync() + { + // Arrange + var model = new VectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + // Act + var upsertResult = await sut.UpsertAsync(model); + var getResult = await sut.GetAsync(model.HotelId); + + // Assert + Assert.Equal("key", upsertResult); + + Assert.NotNull(getResult); + Assert.Equal("key", getResult.HotelId); + Assert.Equal("Test Name", getResult.HotelName); + } + + [Fact(Skip = SkipReason)] + public async Task 
UpsertWithBsonModelWorksCorrectlyAsync() + { + // Arrange + var definition = new VectorStoreRecordDefinition + { + Properties = new List + { + new VectorStoreRecordKeyProperty("Id", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)) + } + }; + + var model = new BsonTestModel { Id = "key", HotelName = "Test Name" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection( + fixture.MongoDatabase, + fixture.TestCollection, + new() { VectorStoreRecordDefinition = definition }); + + // Act + var upsertResult = await sut.UpsertAsync(model); + var getResult = await sut.GetAsync(model.Id); + + // Assert + Assert.Equal("key", upsertResult); + + Assert.NotNull(getResult); + Assert.Equal("key", getResult.Id); + Assert.Equal("Test Name", getResult.HotelName); + } + + [Fact(Skip = SkipReason)] + public async Task UpsertWithBsonVectorStoreModelWorksCorrectlyAsync() + { + // Arrange + var model = new BsonVectorStoreTestModel { HotelId = "key", HotelName = "Test Name" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + // Act + var upsertResult = await sut.UpsertAsync(model); + var getResult = await sut.GetAsync(model.HotelId); + + // Assert + Assert.Equal("key", upsertResult); + + Assert.NotNull(getResult); + Assert.Equal("key", getResult.HotelId); + Assert.Equal("Test Name", getResult.HotelName); + } + + [Fact(Skip = SkipReason)] + public async Task UpsertWithBsonVectorStoreWithNameModelWorksCorrectlyAsync() + { + // Arrange + var model = new BsonVectorStoreWithNameTestModel { Id = "key", HotelName = "Test Name" }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection(fixture.MongoDatabase, fixture.TestCollection); + + // Act + var upsertResult = await sut.UpsertAsync(model); + var getResult = await sut.GetAsync(model.Id); + + // Assert + Assert.Equal("key", upsertResult); + + Assert.NotNull(getResult); + Assert.Equal("key", getResult.Id); + 
Assert.Equal("Test Name", getResult.HotelName); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var options = new AzureCosmosDBMongoDBVectorStoreRecordCollectionOptions> + { + VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition + }; + + var sut = new AzureCosmosDBMongoDBVectorStoreRecordCollection>(fixture.MongoDatabase, fixture.TestCollection, options); + + // Act + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "Description", "This is a generic mapper hotel" }, + { "Tags", new string[] { "generic" } }, + { "ParkingIncluded", false }, + { "Timestamp", new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime() }, + { "HotelRating", 3.6f } + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } + } + }); + + var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(upsertResult); + Assert.Equal("GenericMapper-1", upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new[] { "generic" }, localGetResult.Data["Tags"]); + Assert.False((bool?)localGetResult.Data["ParkingIncluded"]); + Assert.Equal(new DateTime(1970, 1, 18, 0, 0, 0).ToUniversalTime(), localGetResult.Data["Timestamp"]); + Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + + #region private + + private AzureCosmosDBMongoDBHotel CreateTestHotel(string hotelId) + { + return new AzureCosmosDBMongoDBHotel + { + HotelId = hotelId, + HotelName = $"My Hotel {hotelId}", + HotelCode = 42, + HotelRating = 
4.5f, + ParkingIncluded = true, + Tags = { "t1", "t2" }, + Description = "This is a great hotel.", + Timestamp = new DateTime(2024, 09, 23, 15, 32, 33), + DescriptionEmbedding = new[] { 30f, 31f, 32f, 33f }, + }; + } + + private sealed class TestModel + { + public string? Id { get; set; } + + public string? HotelName { get; set; } + } + + private sealed class VectorStoreTestModel + { + [VectorStoreRecordKey] + public string? HotelId { get; set; } + + [VectorStoreRecordData(StoragePropertyName = "hotel_name")] + public string? HotelName { get; set; } + } + + private sealed class BsonTestModel + { + [BsonId] + public string? Id { get; set; } + + [BsonElement("hotel_name")] + public string? HotelName { get; set; } + } + + private sealed class BsonVectorStoreTestModel + { + [BsonId] + [VectorStoreRecordKey] + public string? HotelId { get; set; } + + [BsonElement("hotel_name")] + [VectorStoreRecordData] + public string? HotelName { get; set; } + } + + private sealed class BsonVectorStoreWithNameTestModel + { + [BsonId] + [VectorStoreRecordKey] + public string? Id { get; set; } + + [BsonElement("bson_hotel_name")] + [VectorStoreRecordData(StoragePropertyName = "storage_hotel_name")] + public string? HotelName { get; set; } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreTests.cs new file mode 100644 index 000000000000..9be1378b7b86 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBMongoDB/AzureCosmosDBMongoDBVectorStoreTests.cs @@ -0,0 +1,29 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBMongoDB; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.AzureCosmosDBMongoDB; + +[Collection("AzureCosmosDBMongoDBVectorStoreCollection")] +public class AzureCosmosDBMongoDBVectorStoreTests(AzureCosmosDBMongoDBVectorStoreFixture fixture) +{ + private const string? SkipReason = "Azure CosmosDB MongoDB cluster is required"; + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAListOfExistingCollectionNamesAsync() + { + // Arrange + var sut = new AzureCosmosDBMongoDBVectorStore(fixture.MongoDatabase); + + // Act + var collectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Contains("sk-test-hotels", collectionNames); + Assert.Contains("sk-test-contacts", collectionNames); + Assert.Contains("sk-test-addresses", collectionNames); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs new file mode 100644 index 000000000000..f358caa0e501 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLHotel.cs @@ -0,0 +1,50 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Data; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +#pragma warning disable CS8618 + +public record AzureCosmosDBNoSQLHotel() +{ + /// The key of the record. + [VectorStoreRecordKey] + public string HotelId { get; init; } + + /// A string metadata field. + [VectorStoreRecordData(IsFilterable = true)] + public string? HotelName { get; set; } + + /// An int metadata field. 
+ [VectorStoreRecordData(IsFullTextSearchable = true)] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. + [JsonPropertyName("parking_is_included")] + [VectorStoreRecordData] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. + [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData] + public string Description { get; set; } + + /// A datetime field. + [VectorStoreRecordData] + public DateTimeOffset Timestamp { get; set; } + + /// A vector field. + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.Flat, DistanceFunction: DistanceFunction.CosineSimilarity)] + public ReadOnlyMemory? DescriptionEmbedding { get; set; } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs index 1df46166e63f..7e6f376a8684 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs @@ -47,7 +47,7 @@ public Task DisposeAsync() private static string GetSetting(IConfigurationRoot configuration, string settingName) { - var settingValue = configuration[$"AzureCosmosDB:{settingName}"]; + var settingValue = configuration[$"AzureCosmosDBNoSQL:{settingName}"]; if (string.IsNullOrWhiteSpace(settingValue)) { throw new ArgumentNullException($"{settingValue} string is not configured"); diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionFixture.cs new file mode 
100644 index 000000000000..9702cdda490d --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreCollectionFixture.cs @@ -0,0 +1,9 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +[CollectionDefinition("AzureCosmosDBNoSQLVectorStoreCollection")] +public class AzureCosmosDBNoSQLVectorStoreCollectionFixture : ICollectionFixture +{ } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs new file mode 100644 index 000000000000..85e5a90c384d --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreFixture.cs @@ -0,0 +1,79 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.Extensions.Configuration; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +public class AzureCosmosDBNoSQLVectorStoreFixture : IAsyncLifetime, IDisposable +{ + private const string DatabaseName = "testdb"; + + private readonly CosmosClient _cosmosClient; + + /// that can be used to manage the collections in Azure CosmosDB NoSQL. + public Database? 
Database { get; private set; } + + public AzureCosmosDBNoSQLVectorStoreFixture() + { + var configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile( + path: "testsettings.development.json", + optional: false, + reloadOnChange: true + ) + .AddEnvironmentVariables() + .Build(); + + var connectionString = GetConnectionString(configuration); + var options = new CosmosClientOptions { UseSystemTextJsonSerializerWithOptions = JsonSerializerOptions.Default }; + + this._cosmosClient = new CosmosClient(connectionString, options); + } + + public async Task InitializeAsync() + { + await this._cosmosClient.CreateDatabaseIfNotExistsAsync(DatabaseName); + + this.Database = this._cosmosClient.GetDatabase(DatabaseName); + } + + public async Task DisposeAsync() + { + await this.Database!.DeleteAsync(); + } + + public void Dispose() + { + this.Dispose(true); + GC.SuppressFinalize(this); + } + + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + this._cosmosClient.Dispose(); + } + } + + #region private + + private static string GetConnectionString(IConfigurationRoot configuration) + { + var settingValue = configuration["AzureCosmosDBNoSQL:ConnectionString"]; + if (string.IsNullOrWhiteSpace(settingValue)) + { + throw new ArgumentNullException($"{settingValue} string is not configured"); + } + + return settingValue; + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..b47802b796b2 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreRecordCollectionTests.cs @@ -0,0 +1,347 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Microsoft.SemanticKernel.Data; +using Xunit; +using DistanceFunction = Microsoft.SemanticKernel.Data.DistanceFunction; +using IndexKind = Microsoft.SemanticKernel.Data.IndexKind; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +/// +/// Integration tests for class. +/// +[Collection("AzureCosmosDBNoSQLVectorStoreCollection")] +public sealed class AzureCosmosDBNoSQLVectorStoreRecordCollectionTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) +{ + private const string? SkipReason = "Azure CosmosDB NoSQL cluster is required"; + + [Fact(Skip = SkipReason)] + public async Task ItCanCreateCollectionAsync() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "test-create-collection"); + + // Act + await sut.CreateCollectionAsync(); + + // Assert + Assert.True(await sut.CollectionExistsAsync()); + } + + [Theory(Skip = SkipReason)] + [InlineData("sk-test-hotels", true)] + [InlineData("nonexistentcollection", false)] + public async Task CollectionExistsReturnsCollectionStateAsync(string collectionName, bool expectedExists) + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); + + if (expectedExists) + { + await fixture.Database!.CreateContainerIfNotExistsAsync(new ContainerProperties(collectionName, "/id")); + } + + // Act + var actual = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(expectedExists, actual); + } + + [Theory(Skip = SkipReason)] + [InlineData(true, true)] + [InlineData(true, false)] + [InlineData(false, true)] + [InlineData(false, false)] + public async Task ItCanCreateCollectionUpsertAndGetAsync(bool includeVectors, bool useRecordDefinition) + { + // Arrange + const string HotelId = 
"55555555-5555-5555-5555-555555555555"; + + var collectionNamePostfix = useRecordDefinition ? "with-definition" : "with-type"; + collectionNamePostfix = includeVectors ? $"{collectionNamePostfix}-with-vectors" : $"{collectionNamePostfix}-without-vectors"; + var collectionName = $"collection-{collectionNamePostfix}"; + + var options = new AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions + { + VectorStoreRecordDefinition = useRecordDefinition ? this.GetTestHotelRecordDefinition() : null + }; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, collectionName); + + var record = this.CreateTestHotel(HotelId); + + // Act + await sut.CreateCollectionAsync(); + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId, new() { IncludeVectors = includeVectors }); + + // Assert + Assert.True(await sut.CollectionExistsAsync()); + await sut.DeleteCollectionAsync(); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + Assert.Equal(record.HotelId, getResult.HotelId); + Assert.Equal(record.HotelName, getResult.HotelName); + Assert.Equal(record.HotelCode, getResult.HotelCode); + Assert.Equal(record.HotelRating, getResult.HotelRating); + Assert.Equal(record.ParkingIncluded, getResult.ParkingIncluded); + Assert.Equal(record.Tags.ToArray(), getResult.Tags.ToArray()); + Assert.Equal(record.Description, getResult.Description); + Assert.Equal(record.Timestamp, getResult.Timestamp); + + if (includeVectors) + { + Assert.NotNull(getResult.DescriptionEmbedding); + Assert.Equal(record.DescriptionEmbedding!.Value.ToArray(), getResult.DescriptionEmbedding.Value.ToArray()); + } + else + { + Assert.Null(getResult.DescriptionEmbedding); + } + } + + [Fact(Skip = SkipReason)] + public async Task ItCanDeleteCollectionAsync() + { + // Arrange + const string TempCollectionName = "test-delete-collection"; + await fixture.Database!.CreateContainerAsync(new ContainerProperties(TempCollectionName, "/id")); + + var 
sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, TempCollectionName); + + Assert.True(await sut.CollectionExistsAsync()); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + Assert.False(await sut.CollectionExistsAsync()); + } + + [Theory(Skip = SkipReason)] + [InlineData("consistent-mode-collection", IndexingMode.Consistent)] + [InlineData("lazy-mode-collection", IndexingMode.Lazy)] + [InlineData("none-mode-collection", IndexingMode.None)] + public async Task ItCanGetAndDeleteRecordAsync(string collectionName, IndexingMode indexingMode) + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection( + fixture.Database!, + collectionName, + new() { IndexingMode = indexingMode, Automatic = indexingMode != IndexingMode.None }); + + await sut.CreateCollectionAsync(); + + var record = this.CreateTestHotel(HotelId); + + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + await sut.DeleteAsync(HotelId); + + getResult = await sut.GetAsync(HotelId); + + // Assert + Assert.Null(getResult); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAndDeleteRecordWithPartitionKeyAsync() + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + const string HotelName = "Test Hotel Name"; + + IVectorStoreRecordCollection sut = + new AzureCosmosDBNoSQLVectorStoreRecordCollection( + fixture.Database!, + "delete-with-partition-key", + new() { PartitionKeyPropertyName = "HotelName" }); + + await sut.CreateCollectionAsync(); + + var record = this.CreateTestHotel(HotelId, HotelName); + + var upsertResult = await sut.UpsertAsync(record); + + var key = new AzureCosmosDBNoSQLCompositeKey(record.HotelId, record.HotelName!); + var getResult = await sut.GetAsync(key); + + Assert.Equal(HotelId, upsertResult.RecordKey); 
+ Assert.Equal(HotelName, upsertResult.PartitionKey); + Assert.NotNull(getResult); + + // Act + await sut.DeleteAsync(key); + + getResult = await sut.GetAsync(key); + + // Assert + Assert.Null(getResult); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAndDeleteBatchAsync() + { + // Arrange + const string HotelId1 = "11111111-1111-1111-1111-111111111111"; + const string HotelId2 = "22222222-2222-2222-2222-222222222222"; + const string HotelId3 = "33333333-3333-3333-3333-333333333333"; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "get-and-delete-batch"); + + await sut.CreateCollectionAsync(); + + var record1 = this.CreateTestHotel(HotelId1); + var record2 = this.CreateTestHotel(HotelId2); + var record3 = this.CreateTestHotel(HotelId3); + + var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + + Assert.Equal([HotelId1, HotelId2, HotelId3], upsertResults); + + Assert.NotNull(getResults.First(l => l.HotelId == HotelId1)); + Assert.NotNull(getResults.First(l => l.HotelId == HotelId2)); + Assert.NotNull(getResults.First(l => l.HotelId == HotelId3)); + + // Act + await sut.DeleteBatchAsync([HotelId1, HotelId2, HotelId3]); + + getResults = await sut.GetBatchAsync([HotelId1, HotelId2, HotelId3]).ToListAsync(); + + // Assert + Assert.Empty(getResults); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertRecordAsync() + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection(fixture.Database!, "upsert-record"); + + await sut.CreateCollectionAsync(); + + var record = this.CreateTestHotel(HotelId); + + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(HotelId); + + Assert.Equal(HotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + record.HotelName = 
"Updated name"; + record.HotelRating = 10; + + upsertResult = await sut.UpsertAsync(record); + getResult = await sut.GetAsync(HotelId); + + // Assert + Assert.NotNull(getResult); + Assert.Equal("Updated name", getResult.HotelName); + Assert.Equal(10, getResult.HotelRating); + } + + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + const string HotelId = "55555555-5555-5555-5555-555555555555"; + var options = new AzureCosmosDBNoSQLVectorStoreRecordCollectionOptions> + { + VectorStoreRecordDefinition = this.GetTestHotelRecordDefinition() + }; + + var sut = new AzureCosmosDBNoSQLVectorStoreRecordCollection>(fixture.Database!, "generic-mapper", options); + + await sut.CreateCollectionAsync(); + + // Act + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(HotelId) + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "Description", "This is a generic mapper hotel" }, + { "Tags", new List { "generic" } }, + { "parking_is_included", false }, + { "Timestamp", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, + { "HotelRating", 3.6f } + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } + } + }); + + var localGetResult = await sut.GetAsync(HotelId, new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(upsertResult); + Assert.Equal(HotelId, upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new List { "generic" }, localGetResult.Data["Tags"]); + Assert.False((bool?)localGetResult.Data["parking_is_included"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["Timestamp"]); + Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, 
((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + + #region private + + private AzureCosmosDBNoSQLHotel CreateTestHotel(string hotelId, string? hotelName = null) + { + return new AzureCosmosDBNoSQLHotel + { + HotelId = hotelId, + HotelName = hotelName ?? $"My Hotel {hotelId}", + HotelCode = 42, + HotelRating = 4.5f, + ParkingIncluded = true, + Tags = { "t1", "t2" }, + Description = "This is a great hotel.", + DescriptionEmbedding = new[] { 30f, 31f, 32f, 33f }, + Timestamp = new DateTimeOffset(2024, 9, 24, 17, 28, 32, TimeSpan.Zero), + }; + } + + private VectorStoreRecordDefinition GetTestHotelRecordDefinition() + { + return new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(string)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("HotelCode", typeof(int)), + new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)), + new VectorStoreRecordDataProperty("HotelRating", typeof(float)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordDataProperty("Description", typeof(string)), + new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.CosineSimilarity } + ] + }; + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs new file mode 100644 index 000000000000..938fe5c14caf --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLVectorStoreTests.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Azure.Cosmos; +using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.AzureCosmosDBNoSQL; + +/// +/// Integration tests for . +/// +[Collection("AzureCosmosDBNoSQLVectorStoreCollection")] +public sealed class AzureCosmosDBNoSQLVectorStoreTests(AzureCosmosDBNoSQLVectorStoreFixture fixture) +{ + private const string? SkipReason = "Azure CosmosDB NoSQL cluster is required"; + + [Fact(Skip = SkipReason)] + public async Task ItCanGetAListOfExistingCollectionNamesAsync() + { + // Arrange + var sut = new AzureCosmosDBNoSQLVectorStore(fixture.Database!); + + await fixture.Database!.CreateContainerIfNotExistsAsync(new ContainerProperties("list-names-1", "/id")); + await fixture.Database!.CreateContainerIfNotExistsAsync(new ContainerProperties("list-names-2", "/id")); + + // Act + var collectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Contains("list-names-1", collectionNames); + Assert.Contains("list-names-2", collectionNames); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs index 411225101ffc..f4846cda5427 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Pinecone/PineconeVectorStoreRecordCollectionTests.cs @@ -410,7 +410,7 @@ public void UseRecordWithNoEmbeddingThrows() "Whatever")); Assert.Equal( - $"No vector property found on type {typeof(PineconeRecordNoEmbedding).FullName}.", + $"No vector property found on type {nameof(PineconeRecordNoEmbedding)} or the provided VectorStoreRecordDefinition while at least one is required.", exception.Message); } @@ -434,7 +434,7 @@ public void 
UseRecordWithMultipleEmbeddingsThrows() "Whatever")); Assert.Equal( - $"Multiple vector properties found on type {typeof(PineconeRecordMultipleEmbeddings).FullName} while only one is supported.", + $"Multiple vector properties found on type {nameof(PineconeRecordMultipleEmbeddings)} or the provided VectorStoreRecordDefinition while only one is supported.", exception.Message); } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs index 7e2e9b1f7d78..278500613db9 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Qdrant/QdrantVectorStoreRecordCollectionTests.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Threading.Tasks; @@ -351,6 +352,62 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() await Assert.ThrowsAsync(async () => await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true })); } + [Fact] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var options = new QdrantVectorStoreRecordCollectionOptions> + { + VectorStoreRecordDefinition = fixture.HotelVectorStoreRecordDefinition + }; + var sut = new QdrantVectorStoreRecordCollection>(fixture.QdrantClient, "singleVectorHotels", options); + + // Act + var baseSetGetResult = await sut.GetAsync(11, new GetRecordOptions { IncludeVectors = true }); + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(40) + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "HotelCode", 40 }, + { "ParkingIncluded", false }, + { "HotelRating", 3.6d }, + { "Tags", new string[] { "generic" } }, + { "Description", "This is a generic mapper hotel" }, + }, + 
Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } + } + }); + var localGetResult = await sut.GetAsync(40, new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(baseSetGetResult); + Assert.Equal(11ul, baseSetGetResult.Key); + Assert.Equal("My Hotel 11", baseSetGetResult.Data["HotelName"]); + Assert.Equal(11, baseSetGetResult.Data["HotelCode"]); + Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); + Assert.Equal(4.5f, baseSetGetResult.Data["HotelRating"]); + Assert.Equal(new[] { "t1", "t2" }, ((List)baseSetGetResult.Data["Tags"]!).ToArray()); + Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); + Assert.NotNull(baseSetGetResult.Vectors["DescriptionEmbedding"]); + Assert.IsType>(baseSetGetResult.Vectors["DescriptionEmbedding"]); + + Assert.Equal(40ul, upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal(40ul, localGetResult.Key); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal(40, localGetResult.Data["HotelCode"]); + Assert.False((bool)localGetResult.Data["ParkingIncluded"]!); + Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); + Assert.Equal(new[] { "generic" }, ((List)localGetResult.Data["Tags"]!).ToArray()); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.NotNull(localGetResult.Vectors["DescriptionEmbedding"]); + Assert.IsType>(localGetResult.Vectors["DescriptionEmbedding"]); + } + private HotelInfo CreateTestHotel(uint hotelId) { return new HotelInfo diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs index 6c980693c4bc..f564cec92b75 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs +++ 
b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisHashSetVectorStoreRecordCollectionTests.cs @@ -313,6 +313,59 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); } + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var options = new RedisHashSetVectorStoreRecordCollectionOptions> + { + PrefixCollectionNameToKeyNames = true, + VectorStoreRecordDefinition = fixture.BasicVectorStoreRecordDefinition + }; + var sut = new RedisHashSetVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); + + // Act + var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "HotelCode", 40 }, + { "ParkingIncluded", true }, + { "Rating", 3.6d }, + { "Description", "This is a generic mapper hotel" }, + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } + } + }); + var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(baseSetGetResult); + Assert.Equal("BaseSet-1", baseSetGetResult.Key); + Assert.Equal("My Hotel 1", baseSetGetResult.Data["HotelName"]); + Assert.Equal(1, baseSetGetResult.Data["HotelCode"]); + Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); + Assert.Equal(3.6d, baseSetGetResult.Data["Rating"]); + Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); + Assert.NotNull(baseSetGetResult.Vectors["DescriptionEmbedding"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + + Assert.Equal("GenericMapper-1", 
upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("GenericMapper-1", localGetResult.Key); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal(40, localGetResult.Data["HotelCode"]); + Assert.True((bool)localGetResult.Data["ParkingIncluded"]!); + Assert.Equal(3.6d, localGetResult.Data["Rating"]); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + private static BasicHotel CreateTestHotel(string hotelId, int hotelCode) { var record = new BasicHotel diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs index 4fbd7bc5d647..317e62eaa36a 100644 --- a/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Redis/RedisJsonVectorStoreRecordCollectionTests.cs @@ -339,6 +339,71 @@ public async Task ItThrowsMappingExceptionForFailedMapperAsync() await Assert.ThrowsAsync(async () => await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true })); } + [Fact(Skip = SkipReason)] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var options = new RedisJsonVectorStoreRecordCollectionOptions> + { + PrefixCollectionNameToKeyNames = true, + VectorStoreRecordDefinition = fixture.VectorStoreRecordDefinition + }; + var sut = new RedisJsonVectorStoreRecordCollection>(fixture.Database, TestCollectionName, options); + + // Act + var baseSetGetResult = await sut.GetAsync("BaseSet-1", new GetRecordOptions { IncludeVectors = true }); + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel("GenericMapper-1") + { + Data = + { + { "HotelName", "Generic Mapper Hotel" 
}, + { "HotelCode", 1 }, + { "Tags", new[] { "generic 1", "generic 2" } }, + { "FTSTags", new[] { "generic 1", "generic 2" } }, + { "ParkingIncluded", true }, + { "LastRenovationDate", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, + { "Rating", 3.6 }, + { "Address", new HotelAddress { City = "Seattle", Country = "USA" } }, + { "Description", "This is a generic mapper hotel" }, + { "DescriptionEmbedding", new[] { 30f, 31f, 32f, 33f } } + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory(new[] { 30f, 31f, 32f, 33f }) } + } + }); + var localGetResult = await sut.GetAsync("GenericMapper-1", new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.NotNull(baseSetGetResult); + Assert.Equal("BaseSet-1", baseSetGetResult.Key); + Assert.Equal("My Hotel 1", baseSetGetResult.Data["HotelName"]); + Assert.Equal(1, baseSetGetResult.Data["HotelCode"]); + Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["Tags"]); + Assert.Equal(new[] { "pool", "air conditioning", "concierge" }, baseSetGetResult.Data["FTSTags"]); + Assert.True((bool)baseSetGetResult.Data["ParkingIncluded"]!); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), baseSetGetResult.Data["LastRenovationDate"]); + Assert.Equal(3.6, baseSetGetResult.Data["Rating"]); + Assert.Equal("Seattle", ((HotelAddress)baseSetGetResult.Data["Address"]!).City); + Assert.Equal("This is a great hotel.", baseSetGetResult.Data["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)baseSetGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + + Assert.Equal("GenericMapper-1", upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("GenericMapper-1", localGetResult.Key); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal(1, localGetResult.Data["HotelCode"]); + Assert.Equal(new[] { "generic 1", "generic 2" }, localGetResult.Data["Tags"]); + Assert.Equal(new[] { "generic 
1", "generic 2" }, localGetResult.Data["FTSTags"]); + Assert.True((bool)localGetResult.Data["ParkingIncluded"]!); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["LastRenovationDate"]); + Assert.Equal(3.6d, localGetResult.Data["Rating"]); + Assert.Equal("Seattle", ((HotelAddress)localGetResult.Data["Address"]!).City); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + private static Hotel CreateTestHotel(string hotelId, int hotelCode) { var address = new HotelAddress { City = "Seattle", Country = "USA" }; diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs new file mode 100644 index 000000000000..e50be0e3e89d --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateHotel.cs @@ -0,0 +1,49 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using Microsoft.SemanticKernel.Data; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +#pragma warning disable CS8618 + +public sealed record WeaviateHotel +{ + /// The key of the record. + [VectorStoreRecordKey] + public Guid HotelId { get; init; } + + /// A string metadata field. + [VectorStoreRecordData(IsFilterable = true)] + public string? HotelName { get; set; } + + /// An int metadata field. + [VectorStoreRecordData] + public int HotelCode { get; set; } + + /// A float metadata field. + [VectorStoreRecordData] + public float? HotelRating { get; set; } + + /// A bool metadata field. + [JsonPropertyName("parking_is_included")] + [VectorStoreRecordData] + public bool ParkingIncluded { get; set; } + + /// An array metadata field. 
+ [VectorStoreRecordData] + public List Tags { get; set; } = []; + + /// A data field. + [VectorStoreRecordData(IsFullTextSearchable = true)] + public string Description { get; set; } + + [VectorStoreRecordData] + public DateTimeOffset Timestamp { get; set; } + + /// A vector field. + [VectorStoreRecordVector(Dimensions: 4, IndexKind: IndexKind.Hnsw, DistanceFunction: DistanceFunction.CosineDistance)] + public ReadOnlyMemory? DescriptionEmbedding { get; set; } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs similarity index 96% rename from dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs rename to dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs index b8cad556d3f7..b88795e9a3d6 100644 --- a/dotnet/src/IntegrationTests/Connectors/Weaviate/WeaviateMemoryStoreTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateMemoryStoreTests.cs @@ -9,7 +9,7 @@ using Microsoft.SemanticKernel.Memory; using Xunit; -namespace SemanticKernel.IntegrationTests.Connectors.Weaviate; +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; /// /// Tests for collection and upsert operations. 
@@ -96,7 +96,7 @@ public async Task ItListsCollectionsAsync() await this._weaviateMemoryStore.CreateCollectionAsync(collectionName); Assert.True(await this._weaviateMemoryStore.DoesCollectionExistAsync(collectionName)); - Assert.Single((await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync())); + Assert.Single(await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync()); var collectionName2 = "SK" + Guid.NewGuid(); await this._weaviateMemoryStore.CreateCollectionAsync(collectionName2); @@ -110,17 +110,17 @@ public async Task ItDeletesCollectionAsync() { await this.DeleteAllClassesAsync(); - Assert.Empty((await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync())); + Assert.Empty(await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync()); var collectionName = "SK" + Guid.NewGuid(); await this._weaviateMemoryStore.CreateCollectionAsync(collectionName); Assert.True(await this._weaviateMemoryStore.DoesCollectionExistAsync(collectionName)); - Assert.Single((await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync())); + Assert.Single(await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync()); await this._weaviateMemoryStore.DeleteCollectionAsync(collectionName); Assert.False(await this._weaviateMemoryStore.DoesCollectionExistAsync(collectionName)); - Assert.Empty((await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync())); + Assert.Empty(await this._weaviateMemoryStore.GetCollectionsAsync().ToListAsync()); } [Fact(Skip = SkipReason)] diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreCollectionFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreCollectionFixture.cs new file mode 100644 index 000000000000..2f3f67f723a2 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreCollectionFixture.cs @@ -0,0 +1,9 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +[CollectionDefinition("WeaviateVectorStoreCollection")] +public class WeaviateVectorStoreCollectionFixture : ICollectionFixture +{ } diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreFixture.cs new file mode 100644 index 000000000000..de74c286bb3f --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreFixture.cs @@ -0,0 +1,126 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Docker.DotNet; +using Docker.DotNet.Models; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +public class WeaviateVectorStoreFixture : IAsyncLifetime +{ + /// The Docker client we are using to create a Weaviate container with. + private readonly DockerClient _client; + + /// The id of the Weaviate container that we are testing with. + private string? _containerId = null; + + public HttpClient? 
HttpClient { get; private set; } + + public WeaviateVectorStoreFixture() + { + using var dockerClientConfiguration = new DockerClientConfiguration(); + this._client = dockerClientConfiguration.CreateClient(); + } + + public async Task InitializeAsync() + { + this._containerId = await SetupWeaviateContainerAsync(this._client); + + this.HttpClient = new HttpClient { BaseAddress = new Uri("http://localhost:8080/v1/") }; + + await WaitForInitializationAsync(this.HttpClient); + } + + public async Task DisposeAsync() + { + if (this._containerId != null) + { + await this._client.Containers.StopContainerAsync(this._containerId, new ContainerStopParameters()); + await this._client.Containers.RemoveContainerAsync(this._containerId, new ContainerRemoveParameters()); + } + } + + #region private + + private async static Task WaitForInitializationAsync(HttpClient httpClient) + { + const int MaxAttemptCount = 10; + const int DelayInterval = 1000; + + int attemptCount = 0; + bool clusterReady = false; + + do + { + await Task.Delay(DelayInterval); + attemptCount++; + clusterReady = await CheckIfClusterReadyAsync(httpClient); + } while (!clusterReady && attemptCount <= MaxAttemptCount); + + if (!clusterReady) + { + throw new InvalidOperationException("Weaviate cluster is not ready for usage."); + } + } + + private static async Task CheckIfClusterReadyAsync(HttpClient httpClient) + { + try + { + var response = await httpClient.GetAsync(new Uri("schema", UriKind.Relative)); + + return response.StatusCode == HttpStatusCode.OK; + } + catch (HttpRequestException) + { + return false; + } + } + + private static async Task SetupWeaviateContainerAsync(DockerClient client) + { + const string Image = "cr.weaviate.io/semitechnologies/weaviate"; + const string Tag = "1.26.4"; + + await client.Images.CreateImageAsync( + new ImagesCreateParameters + { + FromImage = Image, + Tag = Tag, + }, + null, + new Progress()); + + var container = await client.Containers.CreateContainerAsync(new 
CreateContainerParameters() + { + Image = $"{Image}:{Tag}", + HostConfig = new HostConfig() + { + PortBindings = new Dictionary> + { + { "8080", new List { new() { HostPort = "8080" } } }, + { "50051", new List { new() { HostPort = "50051" } } } + }, + PublishAllPorts = true + }, + ExposedPorts = new Dictionary + { + { "8080", default }, + { "50051", default } + }, + }); + + await client.Containers.StartContainerAsync( + container.ID, + new ContainerStartParameters()); + + return container.ID; + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs new file mode 100644 index 000000000000..3b68fa65f840 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreRecordCollectionTests.cs @@ -0,0 +1,294 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +[Collection("WeaviateVectorStoreCollection")] +public sealed class WeaviateVectorStoreRecordCollectionTests(WeaviateVectorStoreFixture fixture) +{ + [Fact] + public async Task ItCanCreateCollectionAsync() + { + // Arrange + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestCreateCollection"); + + // Act + await sut.CreateCollectionAsync(); + + // Assert + Assert.True(await sut.CollectionExistsAsync()); + } + + [Theory] + [InlineData("ExistingCollection", true)] + [InlineData("NonExistentCollection", false)] + public async Task ItCanCheckIfCollectionExistsAsync(string collectionName, bool collectionExists) + { + // Arrange + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, 
collectionName); + + if (collectionExists) + { + await sut.CreateCollectionAsync(); + } + + // Act + var result = await sut.CollectionExistsAsync(); + + // Assert + Assert.Equal(collectionExists, result); + } + + [Theory] + [InlineData("CollectionWithVectorAndDefinition", true, true)] + [InlineData("CollectionWithVector", true, false)] + [InlineData("CollectionWithDefinition", false, true)] + [InlineData("CollectionWithoutVectorAndDefinition", false, false)] + public async Task ItCanUpsertAndGetRecordAsync(string collectionName, bool includeVectors, bool useRecordDefinition) + { + // Arrange + var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); + + var options = new WeaviateVectorStoreRecordCollectionOptions + { + VectorStoreRecordDefinition = useRecordDefinition ? this.GetTestHotelRecordDefinition() : null + }; + + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, collectionName, options); + + var record = this.CreateTestHotel(hotelId); + + // Act && Assert + await sut.CreateCollectionAsync(); + + var upsertResult = await sut.UpsertAsync(record); + + Assert.Equal(hotelId, upsertResult); + + var getResult = await sut.GetAsync(hotelId, new() { IncludeVectors = includeVectors }); + + Assert.NotNull(getResult); + + Assert.Equal(record.HotelId, getResult.HotelId); + Assert.Equal(record.HotelName, getResult.HotelName); + Assert.Equal(record.HotelCode, getResult.HotelCode); + Assert.Equal(record.HotelRating, getResult.HotelRating); + Assert.Equal(record.ParkingIncluded, getResult.ParkingIncluded); + Assert.Equal(record.Tags.ToArray(), getResult.Tags.ToArray()); + Assert.Equal(record.Description, getResult.Description); + Assert.Equal(record.Timestamp, getResult.Timestamp); + + if (includeVectors) + { + Assert.NotNull(getResult.DescriptionEmbedding); + Assert.Equal(record.DescriptionEmbedding!.Value.ToArray(), getResult.DescriptionEmbedding.Value.ToArray()); + } + else + { + Assert.Null(getResult.DescriptionEmbedding); + } + } + + [Fact] + 
public async Task ItCanDeleteCollectionAsync() + { + // Arrange + const string CollectionName = "TestDeleteCollection"; + + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, CollectionName); + + await sut.CreateCollectionAsync(); + + Assert.True(await sut.CollectionExistsAsync()); + + // Act + await sut.DeleteCollectionAsync(); + + // Assert + Assert.False(await sut.CollectionExistsAsync()); + } + + [Fact] + public async Task ItCanDeleteRecordAsync() + { + // Arrange + var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); + + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestDeleteRecord"); + + await sut.CreateCollectionAsync(); + + var record = this.CreateTestHotel(hotelId); + + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(hotelId); + + Assert.Equal(hotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + await sut.DeleteAsync(hotelId); + + getResult = await sut.GetAsync(hotelId); + + // Assert + Assert.Null(getResult); + } + + [Fact] + public async Task ItCanUpsertAndGetAndDeleteBatchAsync() + { + // Arrange + var hotelId1 = new Guid("11111111-1111-1111-1111-111111111111"); + var hotelId2 = new Guid("22222222-2222-2222-2222-222222222222"); + var hotelId3 = new Guid("33333333-3333-3333-3333-333333333333"); + + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestBatch"); + + await sut.CreateCollectionAsync(); + + var record1 = this.CreateTestHotel(hotelId1); + var record2 = this.CreateTestHotel(hotelId2); + var record3 = this.CreateTestHotel(hotelId3); + + var upsertResults = await sut.UpsertBatchAsync([record1, record2, record3]).ToListAsync(); + var getResults = await sut.GetBatchAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); + + Assert.Equal([hotelId1, hotelId2, hotelId3], upsertResults); + + Assert.NotNull(getResults.First(l => l.HotelId == hotelId1)); + Assert.NotNull(getResults.First(l => l.HotelId == hotelId2)); + 
Assert.NotNull(getResults.First(l => l.HotelId == hotelId3)); + + // Act + await sut.DeleteBatchAsync([hotelId1, hotelId2, hotelId3]); + + getResults = await sut.GetBatchAsync([hotelId1, hotelId2, hotelId3]).ToListAsync(); + + // Assert + Assert.Empty(getResults); + } + + [Fact] + public async Task ItCanUpsertRecordAsync() + { + // Arrange + var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); + var sut = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "TestUpsert"); + + await sut.CreateCollectionAsync(); + + var record = this.CreateTestHotel(hotelId); + + var upsertResult = await sut.UpsertAsync(record); + var getResult = await sut.GetAsync(hotelId); + + Assert.Equal(hotelId, upsertResult); + Assert.NotNull(getResult); + + // Act + record.HotelName = "Updated name"; + record.HotelRating = 10; + + upsertResult = await sut.UpsertAsync(record); + getResult = await sut.GetAsync(hotelId); + + // Assert + Assert.NotNull(getResult); + Assert.Equal("Updated name", getResult.HotelName); + Assert.Equal(10, getResult.HotelRating); + } + + [Fact] + public async Task ItCanUpsertAndRetrieveUsingTheGenericMapperAsync() + { + // Arrange + var hotelId = new Guid("55555555-5555-5555-5555-555555555555"); + var options = new WeaviateVectorStoreRecordCollectionOptions> + { + VectorStoreRecordDefinition = this.GetTestHotelRecordDefinition() + }; + + var sut = new WeaviateVectorStoreRecordCollection>(fixture.HttpClient!, "TestGenericMapper", options); + + await sut.CreateCollectionAsync(); + + // Act + var upsertResult = await sut.UpsertAsync(new VectorStoreGenericDataModel(hotelId) + { + Data = + { + { "HotelName", "Generic Mapper Hotel" }, + { "Description", "This is a generic mapper hotel" }, + { "Tags", new List { "generic" } }, + { "parking_is_included", false }, + { "Timestamp", new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero) }, + { "HotelRating", 3.6f } + }, + Vectors = + { + { "DescriptionEmbedding", new ReadOnlyMemory([30f, 31f, 32f, 33f]) } + } 
+ }); + + var localGetResult = await sut.GetAsync(hotelId, new GetRecordOptions { IncludeVectors = true }); + + // Assert + Assert.Equal(hotelId, upsertResult); + + Assert.NotNull(localGetResult); + Assert.Equal("Generic Mapper Hotel", localGetResult.Data["HotelName"]); + Assert.Equal("This is a generic mapper hotel", localGetResult.Data["Description"]); + Assert.Equal(new List { "generic" }, localGetResult.Data["Tags"]); + Assert.False((bool?)localGetResult.Data["parking_is_included"]); + Assert.Equal(new DateTimeOffset(1970, 1, 18, 0, 0, 0, TimeSpan.Zero), localGetResult.Data["Timestamp"]); + Assert.Equal(3.6f, localGetResult.Data["HotelRating"]); + Assert.Equal(new[] { 30f, 31f, 32f, 33f }, ((ReadOnlyMemory)localGetResult.Vectors["DescriptionEmbedding"]!).ToArray()); + } + + #region private + + private WeaviateHotel CreateTestHotel(Guid hotelId, string? hotelName = null) + { + return new WeaviateHotel + { + HotelId = hotelId, + HotelName = hotelName ?? $"My Hotel {hotelId}", + HotelCode = 42, + HotelRating = 4.5f, + ParkingIncluded = true, + Tags = { "t1", "t2" }, + Description = "This is a great hotel.", + DescriptionEmbedding = new[] { 30f, 31f, 32f, 33f }, + Timestamp = new DateTime(2024, 8, 28, 10, 11, 12) + }; + } + + private VectorStoreRecordDefinition GetTestHotelRecordDefinition() + { + return new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("HotelId", typeof(Guid)), + new VectorStoreRecordDataProperty("HotelName", typeof(string)), + new VectorStoreRecordDataProperty("HotelCode", typeof(int)), + new VectorStoreRecordDataProperty("parking_is_included", typeof(bool)), + new VectorStoreRecordDataProperty("HotelRating", typeof(float)), + new VectorStoreRecordDataProperty("Tags", typeof(List)), + new VectorStoreRecordDataProperty("Description", typeof(string)), + new VectorStoreRecordDataProperty("Timestamp", typeof(DateTimeOffset)), + new VectorStoreRecordVectorProperty("DescriptionEmbedding", typeof(ReadOnlyMemory?)) { Dimensions = 4, 
IndexKind = IndexKind.Hnsw, DistanceFunction = DistanceFunction.CosineDistance } + ] + }; + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreTests.cs new file mode 100644 index 000000000000..7de9413084ae --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/WeaviateVectorStoreTests.cs @@ -0,0 +1,35 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Connectors.Weaviate; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Memory.Weaviate; + +[Collection("WeaviateVectorStoreCollection")] +public sealed class WeaviateVectorStoreTests(WeaviateVectorStoreFixture fixture) +{ + [Fact] + public async Task ItCanGetAListOfExistingCollectionNamesAsync() + { + // Arrange + var collection1 = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "Collection1"); + var collection2 = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "Collection2"); + var collection3 = new WeaviateVectorStoreRecordCollection(fixture.HttpClient!, "Collection3"); + + await collection1.CreateCollectionAsync(); + await collection2.CreateCollectionAsync(); + await collection3.CreateCollectionAsync(); + + var sut = new WeaviateVectorStore(fixture.HttpClient!); + + // Act + var collectionNames = await sut.ListCollectionNamesAsync().ToListAsync(); + + // Assert + Assert.Contains("Collection1", collectionNames); + Assert.Contains("Collection2", collectionNames); + Assert.Contains("Collection3", collectionNames); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Weaviate/docker-compose.yml b/dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/docker-compose.yml similarity index 100% rename from dotnet/src/IntegrationTests/Connectors/Weaviate/docker-compose.yml rename to 
dotnet/src/IntegrationTests/Connectors/Memory/Weaviate/docker-compose.yml diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs new file mode 100644 index 000000000000..5dced3f7b4b4 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs @@ -0,0 +1,182 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.Ollama; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
+ +public sealed class OllamaCompletionTests(ITestOutputHelper output) : IDisposable +{ + private const string InputParameterName = "input"; + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Theory(Skip = "For manual verification only")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + this.ConfigureChatOllama(this._kernelBuilder); + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + StringBuilder fullResult = new(); + // Act + await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + Assert.NotNull(content.InnerContent); + if (content is StreamingChatMessageContent messageContent) + { + Assert.NotNull(messageContent.Role); + } + + fullResult.Append(content); + } + + // Assert + Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact(Skip = "For manual verification only")] + public async Task ItShouldReturnInnerContentAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + + this.ConfigureChatOllama(this._kernelBuilder); + + var kernel = this._kernelBuilder.Build(); + + var plugin = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + StreamingKernelContent? 
lastUpdate = null; + await foreach (var update in kernel.InvokeStreamingAsync(plugin["FunPlugin"]["Limerick"])) + { + lastUpdate = update; + } + + // Assert + Assert.NotNull(lastUpdate); + Assert.NotNull(lastUpdate.InnerContent); + Assert.IsType(lastUpdate.InnerContent); + var innerContent = lastUpdate.InnerContent as ChatDoneResponseStream; + Assert.NotNull(innerContent); + Assert.NotNull(innerContent.CreatedAt); + Assert.True(innerContent.Done); + } + + [Theory(Skip = "For manual verification only")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + const string ExpectedAnswerContains = "result"; + + this._kernelBuilder.Services.AddSingleton(this._logger); + this.ConfigureChatOllama(this._kernelBuilder); + + Kernel target = this._kernelBuilder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + // Act + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact(Skip = "For manual verification only")] + public async Task ItInvokePromptTestAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + this.ConfigureChatOllama(builder); + Kernel target = builder.Build(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OllamaPromptExecutionSettings() { Temperature = 0.5f })); 
+ + // Assert + Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Theory(Skip = "For manual verification only")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + this.ConfigureChatOllama(this._kernelBuilder); + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + // Act + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + #region internals + + private readonly XunitLogger _logger = new(output); + private readonly RedirectOutput _testOutputHelper = new(output); + + public void Dispose() + { + this._logger.Dispose(); + this._testOutputHelper.Dispose(); + } + + private void ConfigureChatOllama(IKernelBuilder kernelBuilder) + { + var config = this._configuration.GetSection("Ollama").Get(); + + Assert.NotNull(config); + Assert.NotNull(config.Endpoint); + Assert.NotNull(config.ModelId); + + kernelBuilder.AddOllamaChatCompletion( + modelId: config.ModelId, + endpoint: new Uri(config.Endpoint)); + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs new file mode 100644 index 000000000000..222873eccfb6 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs @@ -0,0 +1,70 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel.Connectors.Ollama; +using Microsoft.SemanticKernel.Embeddings; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.Ollama; + +public sealed class OllamaTextEmbeddingTests +{ + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Theory(Skip = "For manual verification only")] + [InlineData("mxbai-embed-large", 1024)] + [InlineData("nomic-embed-text", 768)] + [InlineData("all-minilm", 384)] + public async Task GenerateEmbeddingHasExpectedLengthForModelAsync(string modelId, int expectedVectorLength) + { + // Arrange + const string TestInputString = "test sentence"; + + OllamaConfiguration? config = this._configuration.GetSection("Ollama").Get(); + Assert.NotNull(config); + Assert.NotNull(config.Endpoint); + + var embeddingGenerator = new OllamaTextEmbeddingGenerationService( + modelId, + new Uri(config.Endpoint)); + + // Act + var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString); + + // Assert + Assert.Equal(expectedVectorLength, result.Length); + } + + [Theory(Skip = "For manual verification only")] + [InlineData("mxbai-embed-large", 1024)] + [InlineData("nomic-embed-text", 768)] + [InlineData("all-minilm", 384)] + public async Task GenerateEmbeddingsHasExpectedResultsLengthForModelAsync(string modelId, int expectedVectorLength) + { + // Arrange + string[] testInputStrings = ["test sentence 1", "test sentence 2", "test sentence 3"]; + + OllamaConfiguration? 
config = this._configuration.GetSection("Ollama").Get(); + Assert.NotNull(config); + Assert.NotNull(config.Endpoint); + + var embeddingGenerator = new OllamaTextEmbeddingGenerationService( + modelId, + new Uri(config.Endpoint)); + + // Act + var result = await embeddingGenerator.GenerateEmbeddingsAsync(testInputStrings); + + // Assert + Assert.Equal(testInputStrings.Length, result.Count); + Assert.All(result, r => Assert.Equal(expectedVectorLength, r.Length)); + } +} diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs new file mode 100644 index 000000000000..126980f57ede --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs @@ -0,0 +1,181 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.Ollama; +using OllamaSharp.Models; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using Xunit.Abstractions; + +namespace SemanticKernel.IntegrationTests.Connectors.Ollama; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
+ +public sealed class OllamaTextGenerationTests(ITestOutputHelper output) : IDisposable +{ + private const string InputParameterName = "input"; + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + [Theory(Skip = "For manual verification only")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + this.ConfigureTextOllama(this._kernelBuilder); + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + StringBuilder fullResult = new(); + // Act + await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + Assert.NotNull(content.InnerContent); + } + + // Assert + Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact(Skip = "For manual verification only")] + public async Task ItShouldReturnInnerContentAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + + this.ConfigureTextOllama(this._kernelBuilder); + + var kernel = this._kernelBuilder.Build(); + + var plugin = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + StreamingKernelContent? 
lastUpdate = null; + await foreach (var update in kernel.InvokeStreamingAsync(plugin["FunPlugin"]["Limerick"])) + { + lastUpdate = update; + } + + // Assert + Assert.NotNull(lastUpdate); + Assert.NotNull(lastUpdate.InnerContent); + + Assert.IsType(lastUpdate.InnerContent); + var innerContent = lastUpdate.InnerContent as GenerateDoneResponseStream; + Assert.NotNull(innerContent); + Assert.NotNull(innerContent.CreatedAt); + Assert.True(innerContent.Done); + } + + [Theory(Skip = "For manual verification only")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + const string ExpectedAnswerContains = "result"; + + this._kernelBuilder.Services.AddSingleton(this._logger); + this.ConfigureTextOllama(this._kernelBuilder); + + Kernel target = this._kernelBuilder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + // Act + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact(Skip = "For manual verification only")] + public async Task ItInvokePromptTestAsync() + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + this.ConfigureTextOllama(builder); + Kernel target = builder.Build(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OllamaPromptExecutionSettings() { Temperature = 0.5f 
})); + + // Assert + Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Theory(Skip = "For manual verification only")] + [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] + public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains) + { + // Arrange + this._kernelBuilder.Services.AddSingleton(this._logger); + var builder = this._kernelBuilder; + + this.ConfigureTextOllama(this._kernelBuilder); + + Kernel target = builder.Build(); + + IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); + + // Act + FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); + var content = actual.GetValue(); + Assert.NotNull(content); + Assert.NotNull(content.InnerContent); + } + + #region internals + + private readonly XunitLogger _logger = new(output); + private readonly RedirectOutput _testOutputHelper = new(output); + + public void Dispose() + { + this._logger.Dispose(); + this._testOutputHelper.Dispose(); + } + + private void ConfigureTextOllama(IKernelBuilder kernelBuilder) + { + var config = this._configuration.GetSection("Ollama").Get(); + + Assert.NotNull(config); + Assert.NotNull(config.Endpoint); + Assert.NotNull(config.ModelId); + + kernelBuilder.AddOllamaTextGeneration( + modelId: config.ModelId, + endpoint: new Uri(config.Endpoint)); + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs deleted file mode 100644 index b09a7a5ef635..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/AIServiceType.cs +++ /dev/null @@ -1,19 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -/// -/// Enumeration to run integration tests for different AI services -/// -public enum AIServiceType -{ - /// - /// Open AI service - /// - OpenAI = 0, - - /// - /// Azure Open AI service - /// - AzureOpenAI = 1 -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs deleted file mode 100644 index bf102a517e52..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/ChatHistoryTests.cs +++ /dev/null @@ -1,149 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.ComponentModel; -using System.Linq; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.ChatCompletion; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -public sealed class ChatHistoryTests(ITestOutputHelper output) : IDisposable -{ - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); - private readonly XunitLogger _logger = new(output); - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - private static readonly JsonSerializerOptions s_jsonOptionsCache = new() { WriteIndented = true }; - - [Fact] - public async Task ItSerializesAndDeserializesChatHistoryAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = 
this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - ChatHistory history = []; - - // Act - history.AddUserMessage("Make me a special poem"); - var historyBeforeJson = JsonSerializer.Serialize(history.ToList(), s_jsonOptionsCache); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - history.AddUserMessage("Ok thank you"); - - ChatMessageContent resultOriginalWorking = await service.GetChatMessageContentAsync(history, settings, kernel); - var historyJson = JsonSerializer.Serialize(history, s_jsonOptionsCache); - var historyAfterSerialization = JsonSerializer.Deserialize(historyJson); - var exception = await Record.ExceptionAsync(() => service.GetChatMessageContentAsync(historyAfterSerialization!, settings, kernel)); - - // Assert - Assert.Null(exception); - } - - [Fact] - public async Task ItUsesChatSystemPromptFromSettingsAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - string systemPrompt = "You are batman. 
If asked who you are, say 'I am Batman!'"; - - OpenAIPromptExecutionSettings settings = new() { ChatSystemPrompt = systemPrompt }; - ChatHistory history = []; - - // Act - history.AddUserMessage("Who are you?"); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - - // Assert - Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task ItUsesChatSystemPromptFromChatHistoryAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - builder.Plugins.AddFromType(); - var kernel = builder.Build(); - - string systemPrompt = "You are batman. If asked who you are, say 'I am Batman!'"; - - OpenAIPromptExecutionSettings settings = new(); - ChatHistory history = new(systemPrompt); - - // Act - history.AddUserMessage("Who are you?"); - var service = kernel.GetRequiredService(); - ChatMessageContent result = await service.GetChatMessageContentAsync(history, settings, kernel); - - // Assert - Assert.Contains("Batman", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("Planners:AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - } - - public 
class FakePlugin - { - [KernelFunction, Description("creates a special poem")] - public string CreateSpecialPoem() - { - return "ABCDE"; - } - } - - public void Dispose() - { - this.Dispose(true); - GC.SuppressFinalize(this); - } - - private void Dispose(bool disposing) - { - if (disposing) - { - this._logger.Dispose(); - } - } -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs index dd4a55f6cc2c..90375307c533 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs @@ -8,6 +8,7 @@ using Microsoft.SemanticKernel.AudioToText; using Microsoft.SemanticKernel.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; +using xRetry; using Xunit; namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; @@ -15,13 +16,13 @@ namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAIAudioToTextTests() { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() .Build(); - [Fact(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [RetryFact]//(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] public async Task OpenAIAudioToTextTestAsync() { // Arrange @@ -45,32 +46,4 @@ public async Task OpenAIAudioToTextTestAsync() // Assert Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); } - - [Fact(Skip = "Re-enable when Azure OpenAPI service is available.")] - public async Task AzureOpenAIAudioToTextTestAsync() - { - // Arrange - const string Filename = "test_audio.wav"; - - AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIAudioToText").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAIAudioToText( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) - .Build(); - - var service = kernel.GetRequiredService(); - - await using Stream audio = File.OpenRead($"./TestData/{Filename}"); - var audioData = await BinaryData.FromStreamAsync(audio); - - // Act - var result = await service.GetTextContentAsync(new AudioContent(audioData, mimeType: "audio/wav"), new OpenAIAudioToTextExecutionSettings(Filename)); - - // Assert - Assert.Contains("The sun rises in the east and sets in the west.", result.Text, StringComparison.OrdinalIgnoreCase); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs new file mode 100644 index 000000000000..1359b701e29c --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs @@ -0,0 +1,269 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http.Resilience; +using Microsoft.Extensions.Logging; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class OpenAIChatCompletionTests : BaseIntegrationTest +{ + [Fact] + public async Task ItCanUseOpenAiChatForTextGenerationAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var func = kernel.CreateFunctionFromPrompt( + "List the two planets after '{{$input}}', excluding moons, using bullet points.", + new OpenAIPromptExecutionSettings()); + + // Act + var result = await func.InvokeAsync(kernel, new() { [InputParameterName] = "Jupiter" }); + + // Assert + Assert.NotNull(result); + Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); + } + + [Fact] + public async Task OpenAIStreamingTestAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + StringBuilder fullResult = new(); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + await foreach (var content in kernel.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) + { + fullResult.Append(content); + } + + // Assert + Assert.Contains("Pike Place", 
fullResult.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task OpenAIHttpRetryPolicyTestAsync() + { + // Arrange + List statusCodes = []; + + var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + Assert.NotNull(openAIConfiguration.ChatModelId); + + var kernelBuilder = Kernel.CreateBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ChatModelId, + apiKey: "INVALID_KEY"); + + kernelBuilder.Services.ConfigureHttpClientDefaults(c => + { + // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example + c.AddStandardResilienceHandler().Configure(o => + { + o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); + o.Retry.OnRetry = args => + { + statusCodes.Add(args.Outcome.Result?.StatusCode); + return ValueTask.CompletedTask; + }; + }); + }); + + var target = kernelBuilder.Build(); + + var plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); + + var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; + + // Act + var exception = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); + + // Assert + Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s)); + Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode); + } + + [Fact] + public async Task OpenAIShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); + + // Act + var result = await kernel.InvokeAsync(plugins["FunPlugin"]["Limerick"]); + + // Assert + Assert.NotNull(result.Metadata); + + // Usage + Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + } + + [Theory(Skip = "This test is for manual verification.")] + [InlineData("\n")] + [InlineData("\r\n")] + public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding) + { + // Arrange + var prompt = + "Given a json input and a request. Apply the request on the json input and return the result. " + + $"Put the result in between tags{lineEnding}" + + $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; + + var kernel = this.CreateAndInitializeKernel(); + + var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin"); + + // Act + FunctionResult actual = await kernel.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); + + // Assert + Assert.Contains("John", actual.GetValue(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task ChatSystemPromptIsNotIgnoredAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." 
}; + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); + + // Assert + Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task SemanticKernelVersionHeaderIsSentAsync() + { + // Arrange + using var defaultHandler = new HttpClientHandler(); + using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); + using var httpClient = new HttpClient(httpHeaderHandler); + + var kernel = this.CreateAndInitializeKernel(httpClient); + + // Act + var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); + + // Assert + Assert.NotNull(httpHeaderHandler.RequestHeaders); + Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); + } + + //[Theory(Skip = "This test is for manual verification.")] + [Theory] + [InlineData(null, null)] + [InlineData(false, null)] + [InlineData(true, 2)] + [InlineData(true, 5)] + public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) + { + // Arrange + var settings = new OpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; + + var kernel = this.CreateAndInitializeKernel(); + + // Act + var result = await kernel.InvokePromptAsync("Hi, can you help me today?", new(settings)); + + var logProbabilityInfo = result.Metadata?["ContentTokenLogProbabilities"] as IReadOnlyList; + + // Assert + Assert.NotNull(logProbabilityInfo); + + if (logprobs is true) + { + Assert.NotNull(logProbabilityInfo); + Assert.Equal(topLogprobs, logProbabilityInfo[0].TopLogProbabilities.Count); + } + else + { + Assert.Empty(logProbabilityInfo); + } + } + + #region internals + + private Kernel CreateAndInitializeKernel(HttpClient? 
httpClient = null) + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId); + Assert.NotNull(OpenAIConfiguration.ApiKey); + Assert.NotNull(OpenAIConfiguration.ServiceId); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey, + serviceId: OpenAIConfiguration.ServiceId, + httpClient: httpClient); + + return kernelBuilder.Build(); + } + + private const string InputParameterName = "input"; + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) + { + public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + this.RequestHeaders = request.Headers; + return await base.SendAsync(request, cancellationToken); + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..2a7d1d6f8f63 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_AutoFunctionChoiceBehaviorTests.cs @@ -0,0 +1,371 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +public sealed class OpenAIAutoFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + private readonly IChatCompletionService _chatCompletionService; + + public OpenAIAutoFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + this._chatCompletionService = this._kernel.GetRequiredService(); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { 
+ // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: auto + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + 
this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + string result = ""; + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + result += content; + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: auto + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + string result = ""; + + // Act + await foreach (string c in promptFunction.InvokeStreamingAsync(this._kernel)) + { + result += c; + } + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. 
+ + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto([plugin.ElementAt(0)], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. 
+ + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto([plugin.ElementAt(0)], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + Assert.NotNull(openAIConfiguration.ChatModelId!); + Assert.NotNull(openAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ChatModelId, + apiKey: openAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// A plugin that returns the current time. 
+ /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + #region private + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? _onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs similarity index 64% rename from dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs rename to dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs index 049287fbbc14..185e49a598f1 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_FunctionCallingTests.cs @@ -2,58 +2,55 @@ using System; using System.Collections.Generic; -using System.ComponentModel; +using System.IO; using System.Linq; using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Azure.AI.OpenAI; using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Time.Testing; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.Connectors.OpenAI; -using 
SemanticKernel.IntegrationTests.Planners.Stepwise; +using OpenAI.Chat; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; +using ChatMessageContent = Microsoft.SemanticKernel.ChatMessageContent; + namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; -public sealed class OpenAIToolsTests : BaseIntegrationTest +public sealed class OpenAIChatCompletionFunctionCallingTests : BaseIntegrationTest { - [Fact(Skip = "OpenAI is throttling requests. Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); - var invokedFunctions = new List(); var filter = new FakeFunctionFilter(async (context, next) => { - invokedFunctions.Add(context.Function.Name); + invokedFunctions.Add($"{context.Function.Name}({string.Join(", ", context.Arguments)})"); await next(context); }); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); kernel.FunctionInvocationFilters.Add(filter); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - var result = await kernel.InvokePromptAsync("How many days until Christmas? Explain your thinking.", new(settings)); + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); // Assert - Assert.NotNull(result); - Assert.Contains("GetCurrentUtcTime", invokedFunctions); + Assert.Contains("rain", result.GetValue(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsStreamingAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); - var invokedFunctions = new List(); var filter = new FakeFunctionFilter(async (context, next) => @@ -62,34 +59,46 @@ public async Task CanAutoInvokeKernelFunctionsStreamingAsync() await next(context); }); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); kernel.FunctionInvocationFilters.Add(filter); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - string result = ""; - await foreach (string c in kernel.InvokePromptStreamingAsync( - $"How much older is John than Jim? Compute that value and pass it to the {nameof(TimeInformation)}.{nameof(TimeInformation.InterpretValue)} function, then respond only with its result.", - new(settings))) + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in kernel.InvokePromptStreamingAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings))) { - result += c; + stringBuilder.Append(update); } // Assert - Assert.Contains("6", result, StringComparison.InvariantCulture); - Assert.Contains("GetAge([personName, John])", invokedFunctions); - Assert.Contains("GetAge([personName, Jim])", invokedFunctions); - Assert.Contains("InterpretValue([value, 3])", invokedFunctions); + Assert.Contains("rain", stringBuilder.ToString(), StringComparison.InvariantCulture); + Assert.Contains("GetCurrentUtcTime()", invokedFunctions); + Assert.Contains("Get_Weather_For_City([cityName, Boston])", invokedFunctions); } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); + var kernel = this.CreateAndInitializeKernel(); + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod((WeatherParameters parameters) => + { + if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || parameters.City.Country == "IE")) + { + return Task.FromResult(42.8); // 42.8 Fahrenheit. + } + + throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); + }, "Get_Current_Temperature", "Get current temperature."), + ]); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("What is the current temperature in Dublin, Ireland, in Fahrenheit?", new(settings)); // Assert @@ -97,15 +106,15 @@ public async Task CanAutoInvokeKernelFunctionsWithComplexTypeParametersAsync() Assert.Contains("42.8", result.GetValue(), StringComparison.InvariantCulture); // The WeatherPlugin always returns 42.8 for Dublin, Ireland. } - [Fact(Skip = "OpenAI is throttling requests. 
Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - kernel.ImportPluginFromType(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("Convert 50 degrees Fahrenheit to Celsius.", new(settings)); // Assert @@ -113,32 +122,27 @@ public async Task CanAutoInvokeKernelFunctionsWithPrimitiveTypeParametersAsync() Assert.Contains("10", result.GetValue(), StringComparison.InvariantCulture); } - [Fact(Skip = "OpenAI is throttling requests. Switch this test to use Azure OpenAI.")] + [Fact] public async Task CanAutoInvokeKernelFunctionsWithEnumTypeParametersAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); - var timeProvider = new FakeTimeProvider(); - timeProvider.SetUtcNow(new DateTimeOffset(new DateTime(2024, 4, 24))); // Wednesday - var timePlugin = new TimePlugin(timeProvider); - kernel.ImportPluginFromObject(timePlugin, nameof(TimePlugin)); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - var result = await kernel.InvokePromptAsync( - "When was last friday? 
Show the date in format DD.MM.YYYY for example: 15.07.2019", - new(settings)); + + // Act + var result = await kernel.InvokePromptAsync("Given the current time of day and weather, what is the likely color of the sky in Boston?", new(settings)); // Assert Assert.NotNull(result); - Assert.Contains("19.04.2024", result.GetValue(), StringComparison.OrdinalIgnoreCase); + Assert.Contains("rain", result.GetValue(), StringComparison.OrdinalIgnoreCase); } [Fact] public async Task CanAutoInvokeKernelFunctionFromPromptAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); + var kernel = this.CreateAndInitializeKernel(); var promptFunction = KernelFunctionFactory.CreateFromPrompt( "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. Don't ask for more details or context.", @@ -150,8 +154,9 @@ public async Task CanAutoInvokeKernelFunctionFromPromptAsync() "Delivers up-to-date news content.", [promptFunction])); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var result = await kernel.InvokePromptAsync("Show me the latest news as they are.", new(settings)); // Assert @@ -163,7 +168,7 @@ public async Task CanAutoInvokeKernelFunctionFromPromptAsync() public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() { // Arrange - Kernel kernel = this.InitializeKernel(); + var kernel = this.CreateAndInitializeKernel(); var promptFunction = KernelFunctionFactory.CreateFromPrompt( "Your role is always to return this text - 'A Game-Changer for the Transportation Industry'. 
Don't ask for more details or context.", @@ -175,8 +180,9 @@ public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() "Delivers up-to-date news content.", [promptFunction])); - // Act OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act var streamingResult = kernel.InvokePromptStreamingAsync("Show me the latest news as they are.", new(settings)); var builder = new StringBuilder(); @@ -197,7 +203,7 @@ public async Task CanAutoInvokeKernelFunctionFromPromptStreamingAsync() public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -210,7 +216,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); // Current way of handling function calls manually using connector specific chat message content class. 
- var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + var toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); while (toolCalls.Count > 0) { @@ -233,7 +239,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu // Sending the functions invocation results back to the LLM to get the final response result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); - toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); + toolCalls = ((OpenAIChatMessageContent)result).ToolCalls.OfType().ToList(); } // Assert @@ -244,7 +250,7 @@ public async Task ConnectorSpecificChatMessageContentClassesCanBeUsedForManualFu public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -280,14 +286,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManual Assert.Contains("rain", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); } - [Fact(Skip = "The test is temporarily disabled until a more stable solution is found.")] + [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); - chatHistory.AddSystemMessage("If you are unable to answer the question for whatever reason, please add the 'error' keyword to the response."); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an 
error has happen."); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -321,14 +327,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExc // Assert Assert.NotNull(messageContent.Content); - Assert.Contains("error", messageContent.Content, StringComparison.InvariantCultureIgnoreCase); + TestHelpers.AssertChatErrorExcuseMessage(messageContent.Content); } [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response."); @@ -377,7 +383,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFu public async Task ItFailsIfNoFunctionResultProvidedAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -401,7 +407,7 @@ public async Task ItFailsIfNoFunctionResultProvidedAsync() public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -414,42 +420,72 @@ public async Task 
ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); // Assert - Assert.Equal(5, chatHistory.Count); - var userMessage = chatHistory[0]; Assert.Equal(AuthorRole.User, userMessage.Role); - // LLM requested the current time. - var getCurrentTimeFunctionCallRequestMessage = chatHistory[1]; - Assert.Equal(AuthorRole.Assistant, getCurrentTimeFunctionCallRequestMessage.Role); + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. 
+ getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } - var getCurrentTimeFunctionCallRequest = getCurrentTimeFunctionCallRequestMessage.Items.OfType().Single(); Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); - // Connector invoked the GetCurrentUtcTime function and added result to chat history. - var getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); Assert.NotNull(getCurrentTimeFunctionCallResult.Result); - // LLM requested the weather for Boston. 
- var getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; - Assert.Equal(AuthorRole.Assistant, getWeatherForCityFunctionCallRequestMessage.Role); - - var getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); - Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); - Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); - Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); - - // Connector invoked the Get_Weather_For_City function and added result to chat history. - var getWeatherForCityFunctionCallResultMessage = chatHistory[4]; Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. @@ -464,7 +500,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -520,7 +556,7 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManual public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var chatHistory = new ChatHistory(); chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); @@ -538,42 +574,72 @@ public async Task 
ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu } // Assert - Assert.Equal(5, chatHistory.Count); - var userMessage = chatHistory[0]; Assert.Equal(AuthorRole.User, userMessage.Role); - // LLM requested the current time. - var getCurrentTimeFunctionCallRequestMessage = chatHistory[1]; - Assert.Equal(AuthorRole.Assistant, getCurrentTimeFunctionCallRequestMessage.Role); + // LLM requested the functions to call. + var getParallelFunctionCallRequestMessage = chatHistory[1]; + Assert.Equal(AuthorRole.Assistant, getParallelFunctionCallRequestMessage.Role); + + // Parallel Function Calls in the same request + var functionCalls = getParallelFunctionCallRequestMessage.Items.OfType().ToArray(); + + ChatMessageContent getCurrentTimeFunctionCallResultMessage; + ChatMessageContent getWeatherForCityFunctionCallRequestMessage; + FunctionCallContent getWeatherForCityFunctionCallRequest; + FunctionCallContent getCurrentTimeFunctionCallRequest; + ChatMessageContent getWeatherForCityFunctionCallResultMessage; + + // Assert + // Non Parallel Tool Calling + if (functionCalls.Length == 1) + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // LLM requested the weather for Boston. + getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; + getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[4]; + } + else // Parallel Tool Calling + { + // LLM requested the current time. + getCurrentTimeFunctionCallRequest = functionCalls[0]; + + // LLM requested the weather for Boston. 
+ getWeatherForCityFunctionCallRequest = functionCalls[1]; + + // Connector invoked the GetCurrentUtcTime function and added result to chat history. + getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + + // Connector invoked the Get_Weather_For_City function and added result to chat history. + getWeatherForCityFunctionCallResultMessage = chatHistory[3]; + } - var getCurrentTimeFunctionCallRequest = getCurrentTimeFunctionCallRequestMessage.Items.OfType().Single(); Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName); Assert.NotNull(getCurrentTimeFunctionCallRequest.Id); - // Connector invoked the GetCurrentUtcTime function and added result to chat history. - var getCurrentTimeFunctionCallResultMessage = chatHistory[2]; + Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); + Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); + Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); + Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role); Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType().Single(); + // Connector invoked the GetCurrentUtcTime function and added result to chat history. Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName); Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName); Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId); Assert.NotNull(getCurrentTimeFunctionCallResult.Result); - // LLM requested the weather for Boston. 
- var getWeatherForCityFunctionCallRequestMessage = chatHistory[3]; - Assert.Equal(AuthorRole.Assistant, getWeatherForCityFunctionCallRequestMessage.Role); - - var getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType().Single(); - Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName); - Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName); - Assert.NotNull(getWeatherForCityFunctionCallRequest.Id); - - // Connector invoked the Get_Weather_For_City function and added result to chat history. - var getWeatherForCityFunctionCallResultMessage = chatHistory[4]; Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role); Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType()); // Current function calling model adds TextContent item representing the result of the function call. @@ -584,18 +650,18 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu Assert.NotNull(getWeatherForCityFunctionCallResult.Result); } - [Fact(Skip = "The test is temporarily disabled until a more stable solution is found.")] + [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; var sut = kernel.GetRequiredService(); var chatHistory = new ChatHistory(); - chatHistory.AddSystemMessage("If you are unable to answer the question for whatever reason, please add the 'error' keyword to the response."); + chatHistory.AddSystemMessage("Add the \"Error\" keyword to the response, if you are unable to answer a question or an error has happen."); chatHistory.AddUserMessage("Given the current time of day and 
weather, what is the likely color of the sky in Boston?"); string? result = null; @@ -639,14 +705,14 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExc } // Assert - Assert.Contains("error", result, StringComparison.InvariantCultureIgnoreCase); + TestHelpers.AssertChatErrorExcuseMessage(result); } [Fact] public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsForStreamingAsync() { // Arrange - var kernel = this.InitializeKernel(importHelperPlugin: true); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions }; @@ -707,91 +773,188 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFu Assert.Contains("tornado", result, StringComparison.InvariantCultureIgnoreCase); } - private Kernel InitializeKernel(bool importHelperPlugin = false) + [Fact] + public async Task ItShouldSupportOldFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() { - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("Planners:OpenAI").Get(); - Assert.NotNull(openAIConfiguration); + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove connector-agnostic function-calling items to check if the old function-calling model, which relies on function information in metadata, is handled correctly. + foreach (var chatMessage in chatHistory!) + { + var index = 0; + while (index < chatMessage.Items.Count) + { + var item = chatMessage.Items[index]; + if (item is FunctionCallContent || item is FunctionResultContent) + { + chatMessage.Items.Remove(item); + continue; + } + index++; + } + } - IKernelBuilder builder = this.CreateKernelBuilder() - .AddOpenAIChatCompletion( - modelId: openAIConfiguration.ModelId, - apiKey: openAIConfiguration.ApiKey); + string? 
emailBody = null, emailRecipient = null; - var kernel = builder.Build(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); - if (importHelperPlugin) + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61", emailBody); + } + + [Fact] + public async Task ItShouldSupportNewFunctionCallingModelSerializedIntoChatHistoryByPreviousVersionOfSKAsync() + { + // Arrange + var chatHistory = JsonSerializer.Deserialize(File.ReadAllText("./TestData/serializedChatHistoryV1_15_1.json")); + + // Remove metadata related to the old function-calling model to check if the new model, which relies on function call content/result classes, is handled correctly. + foreach (var chatMessage in chatHistory!) 
{ - kernel.ImportPluginFromFunctions("HelperFunctions", - [ - kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), - kernel.CreateFunctionFromMethod((string cityName) => - cityName switch - { - "Boston" => "61 and rainy", - _ => "31 and snowing", - }, "Get_Weather_For_City", "Gets the current weather for the specified city"), - ]); + if (chatMessage.Metadata is not null) + { + var metadata = new Dictionary(chatMessage.Metadata); + metadata.Remove(OpenAIChatMessageContent.ToolIdProperty); + metadata.Remove("ChatResponseMessage.FunctionToolCalls"); + chatMessage.Metadata = metadata; + } } - return kernel; - } + string? emailBody = null, emailRecipient = null; - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); + kernel.ImportPluginFromFunctions("EmailPlugin", [KernelFunctionFactory.CreateFromMethod((string body, string recipient) => { emailBody = body; emailRecipient = recipient; }, "SendEmail")]); + + // The deserialized chat history contains a list of function calls and the final answer to the question regarding the color of the sky in Boston. + chatHistory.AddUserMessage("Send the exact answer to my email: abc@domain.com"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; + + // Act + var result = await kernel.GetRequiredService().GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.Equal("abc@domain.com", emailRecipient); + Assert.Contains("61\u00B0F", emailBody); + } /// - /// A plugin that returns the current time. 
+ /// This test verifies that the connector can handle the scenario where the assistant response message is added to the chat history. + /// The assistant response message with no function calls added to chat history caused the error: HTTP 400 (invalid_request_error:) [] should be non-empty - 'messages.3.tool_calls' /// - public class TimeInformation + [Fact] + public async Task AssistantResponseAddedToChatHistoryShouldBeHandledCorrectlyAsync() { - [KernelFunction] - [Description("Retrieves the current time in UTC.")] - public string GetCurrentUtcTime() => DateTime.UtcNow.ToString("R"); + // Arrange + var kernel = this.CreateAndInitializeKernel(importHelperPlugin: true); - [KernelFunction] - [Description("Gets the age of the specified person.")] - public int GetAge(string personName) - { - if ("John".Equals(personName, StringComparison.OrdinalIgnoreCase)) - { - return 33; - } + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?"); - if ("Jim".Equals(personName, StringComparison.OrdinalIgnoreCase)) - { - return 30; - } + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }; - return -1; - } + var sut = kernel.GetRequiredService(); + + // Act + var assistanceResponse = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + chatHistory.Add(assistanceResponse); // Adding assistance response to chat history. 
+ chatHistory.AddUserMessage("Return only the color name."); - [KernelFunction] - public int InterpretValue(int value) => value * 2; + await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); } - public class WeatherPlugin + [Fact] + public async Task SubsetOfFunctionsCanBeUsedForFunctionCallingAsync() { - [KernelFunction, Description("Get current temperature.")] - public Task GetCurrentTemperatureAsync(WeatherParameters parameters) - { - if (parameters.City.Name == "Dublin" && (parameters.City.Country == "Ireland" || parameters.City.Country == "IE")) - { - return Task.FromResult(42.8); // 42.8 Fahrenheit. - } + // Arrange + var kernel = this.CreateAndInitializeKernel(); - throw new NotSupportedException($"Weather in {parameters.City.Name} ({parameters.City.Country}) is not supported."); - } + var function = kernel.CreateFunctionFromMethod(() => DayOfWeek.Friday.ToString(), "GetDayOfWeek", "Retrieves the current day of the week."); + kernel.ImportPluginFromFunctions("HelperFunctions", [function]); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What day is today?"); + + var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableFunctions([function.Metadata.ToOpenAIFunction()], true) }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.NotNull(result); + Assert.Contains("Friday", result.Content, StringComparison.InvariantCulture); + } + + [Fact] + public async Task RequiredFunctionShouldBeCalledAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var function = kernel.CreateFunctionFromMethod(() => DayOfWeek.Friday.ToString(), "GetDayOfWeek", "Retrieves the current day of the week."); + kernel.ImportPluginFromFunctions("HelperFunctions", [function]); + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("What day is today?"); + + var settings = 
new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(function.Metadata.ToOpenAIFunction(), true) }; + + var sut = kernel.GetRequiredService(); + + // Act + var result = await sut.GetChatMessageContentAsync(chatHistory, settings, kernel); + + // Assert + Assert.NotNull(result); + Assert.Contains("Friday", result.Content, StringComparison.InvariantCulture); + } + + private Kernel CreateAndInitializeKernel(bool importHelperPlugin = false) + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); - [KernelFunction, Description("Convert temperature from Fahrenheit to Celsius.")] - public Task ConvertTemperatureAsync(double temperatureInFahrenheit) + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + var kernel = kernelBuilder.Build(); + + if (importHelperPlugin) { - double temperatureInCelsius = (temperatureInFahrenheit - 32) * 5 / 9; - return Task.FromResult(temperatureInCelsius); + kernel.ImportPluginFromFunctions("HelperFunctions", + [ + kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."), + kernel.CreateFunctionFromMethod((string cityName) => + { + return cityName switch + { + "Boston" => "61 and rainy", + _ => "31 and snowing", + }; + }, "Get_Weather_For_City", "Gets the current weather for the specified city"), + ]); } + + return kernel; } public record WeatherParameters(City City); @@ -802,8 +965,6 @@ public class City public string Country { get; set; } = string.Empty; } - #region private - private sealed class FakeFunctionFilter : IFunctionInvocationFilter { private readonly Func, Task>? 
_onFunctionInvocation; @@ -818,36 +979,10 @@ public Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func dateMatchingLastDayName 'Tuesday' => Tuesday, 16 May, 2023")] - public string DateMatchingLastDayName( - [Description("The day name to match")] DayOfWeek input, - IFormatProvider? formatProvider = null) - { - DateTimeOffset dateTime = this._timeProvider.GetUtcNow(); - - // Walk backwards from the previous day for up to a week to find the matching day - for (int i = 1; i <= 7; ++i) - { - dateTime = dateTime.AddDays(-1); - if (dateTime.DayOfWeek == input) - { - break; - } - } - - return dateTime.ToString("D", formatProvider); - } - } + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs new file mode 100644 index 000000000000..e0f371252982 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NonStreamingTests.cs @@ -0,0 +1,169 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using OpenAI.Chat; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. + +public sealed class OpenAIChatCompletionNonStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await chatCompletion.GetChatMessageContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + // Act + var result = await chatCompletion.GetChatMessageContentAsync(chatHistory, null, kernel); + + // Assert + Assert.Contains("I don't know", result.Content); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? 
createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + // Act + var result = await textGeneration.GetTextContentAsync("What is the capital of France?", settings, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var result = await textGeneration.GetTextContentAsync("Reply \"I don't know\" to every question. 
What is the capital of France?", null, kernel); + + // Assert + Assert.Contains("I don't know", result.Text); + Assert.NotNull(result.Metadata); + + Assert.True(result.Metadata.TryGetValue("Id", out object? id)); + Assert.NotNull(id); + + Assert.True(result.Metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(result.Metadata.ContainsKey("SystemFingerprint")); + + Assert.True(result.Metadata.TryGetValue("Usage", out object? usageObject)); + Assert.NotNull(usageObject); + + var jsonObject = JsonSerializer.SerializeToElement(usageObject); + Assert.True(jsonObject.TryGetProperty("InputTokenCount", out JsonElement promptTokensJson)); + Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); + Assert.NotEqual(0, promptTokens); + + Assert.True(jsonObject.TryGetProperty("OutputTokenCount", out JsonElement completionTokensJson)); + Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); + Assert.NotEqual(0, completionTokens); + + Assert.True(result.Metadata.TryGetValue("FinishReason", out object? finishReason)); + Assert.Equal("Stop", finishReason); + + Assert.True(result.Metadata.TryGetValue("ContentTokenLogProbabilities", out object? 
logProbabilityInfo)); + Assert.Empty((logProbabilityInfo as IReadOnlyList)!); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..eca02648e8d4 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_NoneFunctionChoiceBehaviorTests.cs @@ -0,0 +1,209 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +public sealed class OpenAINoneFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + + public OpenAINoneFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorNotToInvokeKernelFunctionAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); + this._kernel.Plugins.Add(plugin); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + // Act + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + var result = await this._kernel.InvokePromptAsync("How many days until Christmas?", new(settings)); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorNotToInvokeKernelFunctionAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """ + template_format: 
semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: none + """; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorNotToInvokeKernelFunctionForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); + this._kernel.Plugins.Add(plugin); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.None() }; + + // Act + await foreach (string update in this._kernel.InvokePromptStreamingAsync("How many days until Christmas?", new(settings))) + { + } + + // Assert + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorNotToInvokeKernelFunctionForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: none + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + await foreach (string update in promptFunction.InvokeStreamingAsync(this._kernel)) + { + } + + // Assert + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + Assert.NotNull(openAIConfiguration.ChatModelId!); + Assert.NotNull(openAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ChatModelId, + apiKey: openAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// A plugin that returns the current time. + /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + #region private + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? 
_onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..3bb20904e0c8 --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_RequiredFunctionChoiceBehaviorTests.cs @@ -0,0 +1,447 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +public sealed class OpenAIRequiredFunctionChoiceBehaviorTests : BaseIntegrationTest +{ + private readonly Kernel _kernel; + private readonly FakeFunctionFilter _autoFunctionInvocationFilter; + private readonly IChatCompletionService _chatCompletionService; + + public OpenAIRequiredFunctionChoiceBehaviorTests() + { + this._autoFunctionInvocationFilter = new FakeFunctionFilter(); + + this._kernel = this.InitializeKernel(); + this._kernel.AutoFunctionInvocationFilters.Add(this._autoFunctionInvocationFilter); + this._chatCompletionService = this._kernel.GetRequiredService(); + } + + //[Fact] + 
//This test should be uncommented when the solution to dynamically control list of functions to advertise to the model is implemented. + //public async Task SpecifiedInCodeInstructsConnectorToInvokeRequiredFunctionAutomaticallyForStreamingAsync() + //{ + // // Arrange + // this._kernel.ImportPluginFromType(); + + // var invokedFunctions = new List(); + + // IReadOnlyList? SelectFunctions(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // // Get all function names that have been invoked + // var invokedFunctionNames = context.ChatHistory + // .SelectMany(m => m.Items.OfType()) + // .Select(i => i.FunctionName); + + // invokedFunctions.AddRange(invokedFunctionNames); + + // if (invokedFunctionNames.Contains("GetCurrentDate")) + // { + // return []; // Don't advertise any more functions because the expected function has been invoked. + // } + + // return context.Functions; + // } + + // var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: SelectFunctions) }; + + // var chatHistory = new ChatHistory(); + // chatHistory.AddUserMessage("How many days until Christmas?"); + + // // Act + // var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // // Assert + // Assert.NotNull(result); + + // Assert.Single(invokedFunctions); + // Assert.Contains("GetCurrentDate", invokedFunctions); + //} + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeRequiredFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true) 
}; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? + execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: required + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + // Act + var result = await this._kernel.InvokeAsync(promptFunction); + + // Assert + Assert.NotNull(result); + + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + 
+ var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + //[Fact] + //This test should be uncommented when the solution to dynamically control list of functions to advertise to the model is implemented. + //public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + //{ + // // Arrange + // this._kernel.ImportPluginFromType(); + + // var invokedFunctions = new List(); + + // IReadOnlyList? SelectFunctions(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // // Get all function names that have been invoked + // var invokedFunctionNames = context.ChatHistory + // .SelectMany(m => m.Items.OfType()) + // .Select(i => i.FunctionName); + + // invokedFunctions.AddRange(invokedFunctionNames); + + // if (invokedFunctionNames.Contains("GetCurrentDate")) + // { + // return []; // Don't advertise any more functions because the expected function has been invoked. 
+ // } + + // return context.Functions; + // } + + // var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: SelectFunctions) }; + + // var chatHistory = new ChatHistory(); + // chatHistory.AddUserMessage("How many days until Christmas?"); + + // // Act + // await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + // { + // } + + // // Assert + // Assert.Single(invokedFunctions); + // Assert.Contains("GetCurrentDate", invokedFunctions); + //} + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + } + + // Assert + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInPromptInstructsConnectorToInvokeKernelFunctionAutomaticallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var promptTemplate = """" + template_format: semantic-kernel + template: How many days until Christmas? 
+ execution_settings: + default: + temperature: 0.1 + function_choice_behavior: + type: required + """"; + + var promptFunction = KernelFunctionYaml.FromPromptYaml(promptTemplate); + + string result = ""; + + // Act + await foreach (string c in promptFunction.InvokeStreamingAsync(this._kernel)) + { + result += c; + } + + // Assert + Assert.NotNull(result); + Assert.Contains("GetCurrentDate", invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeKernelFunctionManuallyForStreamingAsync() + { + // Arrange + this._kernel.ImportPluginFromType(); + + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. 
+ + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var settings = new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required([plugin.ElementAt(0)], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + var result = await this._chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, this._kernel); + + // Assert + Assert.NotNull(result); + + Assert.Empty(invokedFunctions); + + var functionCalls = FunctionCallContent.GetFunctionCalls(result); + Assert.NotNull(functionCalls); + Assert.NotEmpty(functionCalls); + + var functionCall = functionCalls.First(); + Assert.Equal("DateTimeUtils", functionCall.PluginName); + Assert.Equal("GetCurrentDate", functionCall.FunctionName); + } + + [Fact] + public async Task SpecifiedInCodeInstructsConnectorToInvokeNonKernelFunctionManuallyForStreamingAsync() + { + // Arrange + var plugin = this._kernel.CreatePluginFromType(); // Creating plugin without importing it to the kernel. 
+ + var invokedFunctions = new List(); + + this._autoFunctionInvocationFilter.RegisterFunctionInvocationHandler(async (context, next) => + { + invokedFunctions.Add(context.Function.Name); + await next(context); + }); + + var functionsForManualInvocation = new List(); + + var settings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Required([plugin.ElementAt(0)], autoInvoke: false) }; + + var chatHistory = new ChatHistory(); + chatHistory.AddUserMessage("How many days until Christmas?"); + + // Act + await foreach (var content in this._chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, this._kernel)) + { + if (content is OpenAIStreamingChatMessageContent openAIContent && openAIContent.ToolCallUpdates is { Count: > 0 } && !string.IsNullOrEmpty(openAIContent.ToolCallUpdates[0].FunctionName)) + { + functionsForManualInvocation.Add(openAIContent.ToolCallUpdates[0].FunctionName); + } + } + + // Assert + Assert.Contains("DateTimeUtils-GetCurrentDate", functionsForManualInvocation); + + Assert.Empty(invokedFunctions); + } + + private Kernel InitializeKernel() + { + var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(openAIConfiguration); + Assert.NotNull(openAIConfiguration.ChatModelId!); + Assert.NotNull(openAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: openAIConfiguration.ChatModelId, + apiKey: openAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #region private + + /// + /// A plugin that returns the current time. 
+ /// +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + private sealed class DateTimeUtils +#pragma warning restore CA1812 // Avoid uninstantiated internal classes + { + [KernelFunction] + [Description("Retrieves the current date.")] + public string GetCurrentDate() => DateTime.UtcNow.ToString("d", CultureInfo.InvariantCulture); + } + + private sealed class FakeFunctionFilter : IAutoFunctionInvocationFilter + { + private Func, Task>? _onFunctionInvocation; + + public void RegisterFunctionInvocationHandler(Func, Task> onFunctionInvocation) + { + this._onFunctionInvocation = onFunctionInvocation; + } + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) + { + if (this._onFunctionInvocation is null) + { + return next(context); + } + + return this._onFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask; + } + } + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs new file mode 100644 index 000000000000..94f8da9617dc --- /dev/null +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletion_StreamingTests.cs @@ -0,0 +1,176 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Text; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; + +#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. 
+ +public sealed class OpenAIChatCompletionStreamingTests : BaseIntegrationTest +{ + [Fact] + public async Task ChatCompletionShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update.Content); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task ChatCompletionShouldUseChatHistoryAndReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var chatCompletion = kernel.Services.GetRequiredService(); + + var chatHistory = new ChatHistory("Reply \"I don't know\" to every question."); + chatHistory.AddUserMessage("What is the capital of France?"); + + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in chatCompletion.GetStreamingChatMessageContentsAsync(chatHistory, null, kernel)) + { + stringBuilder.Append(update.Content); + + foreach (var key in update.Metadata!.Keys) + { + if (!metadata.TryGetValue(key, out var value) || value is null) + { + metadata[key] = update.Metadata[key]; + } + } + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("CompletionId", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? 
finishReason)); + Assert.Equal("Stop", finishReason); + } + + [Fact] + public async Task TextGenerationShouldUseChatSystemPromptAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; + + var stringBuilder = new StringBuilder(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", settings, kernel)) + { + stringBuilder.Append(update); + } + + // Assert + Assert.Contains("I don't know", stringBuilder.ToString()); + } + + [Fact] + public async Task TextGenerationShouldReturnMetadataAsync() + { + // Arrange + var kernel = this.CreateAndInitializeKernel(); + + var textGeneration = kernel.Services.GetRequiredService(); + + // Act + var stringBuilder = new StringBuilder(); + var metadata = new Dictionary(); + + // Act + await foreach (var update in textGeneration.GetStreamingTextContentsAsync("What is the capital of France?", null, kernel)) + { + stringBuilder.Append(update); + + foreach (var key in update.Metadata!.Keys) + { + if (!metadata.TryGetValue(key, out var value) || value is null) + { + metadata[key] = update.Metadata[key]; + } + } + } + + // Assert + Assert.NotNull(metadata); + + Assert.True(metadata.TryGetValue("CompletionId", out object? id)); + Assert.NotNull(id); + + Assert.True(metadata.TryGetValue("CreatedAt", out object? createdAt)); + Assert.NotNull(createdAt); + + Assert.True(metadata.ContainsKey("SystemFingerprint")); + + Assert.True(metadata.TryGetValue("FinishReason", out object? 
finishReason)); + Assert.Equal("Stop", finishReason); + } + + #region internals + + private Kernel CreateAndInitializeKernel() + { + var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get(); + Assert.NotNull(OpenAIConfiguration); + Assert.NotNull(OpenAIConfiguration.ChatModelId!); + Assert.NotNull(OpenAIConfiguration.ApiKey); + + var kernelBuilder = base.CreateKernelBuilder(); + + kernelBuilder.AddOpenAIChatCompletion( + modelId: OpenAIConfiguration.ChatModelId, + apiKey: OpenAIConfiguration.ApiKey); + + return kernelBuilder.Build(); + } + + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + #endregion +} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs deleted file mode 100644 index 675661b76d83..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs +++ /dev/null @@ -1,668 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Azure.AI.OpenAI; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Http.Resilience; -using Microsoft.Extensions.Logging; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. - -public sealed class OpenAICompletionTests(ITestOutputHelper output) : IDisposable -{ - private const string InputParameterName = "input"; - private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - - [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place Market")] - public async Task OpenAITestAsync(string prompt, string expectedAnswerContains) - { - // Arrange - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder - .AddOpenAITextGeneration( - serviceId: openAIConfiguration.ServiceId, - modelId: openAIConfiguration.ModelId, - apiKey: openAIConfiguration.ApiKey) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place Market")] - public async Task OpenAIChatAsTextTestAsync(string prompt, string expectedAnswerContains) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - IKernelBuilder builder = this._kernelBuilder; - - this.ConfigureChatOpenAI(builder); - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact(Skip = "Skipping while we investigate issue with GitHub actions.")] - public async Task CanUseOpenAiChatForTextGenerationAsync() - { - // Note: we use OpenAI Chat Completion and GPT 3.5 Turbo - this._kernelBuilder.Services.AddSingleton(this._logger); - IKernelBuilder builder = this._kernelBuilder; - this.ConfigureChatOpenAI(builder); - - Kernel target = builder.Build(); - - var func = target.CreateFunctionFromPrompt( - "List the two planets after '{{$input}}', excluding moons, using bullet points.", - new OpenAIPromptExecutionSettings()); - - var result = await func.InvokeAsync(target, new() { [InputParameterName] = "Jupiter" }); - - Assert.NotNull(result); - Assert.Contains("Saturn", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); - Assert.Contains("Uranus", result.GetValue(), StringComparison.InvariantCultureIgnoreCase); - } - - [Theory] - [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - public async Task AzureOpenAIStreamingTestAsync(bool useChatModel, string prompt, string expectedAnswerContains) - { - 
// Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(builder); - } - else - { - this.ConfigureAzureOpenAI(builder); - } - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - StringBuilder fullResult = new(); - // Act - await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt })) - { - if (content is StreamingChatMessageContent messageContent) - { - Assert.NotNull(messageContent.Role); - } - - fullResult.Append(content); - } - - // Assert - Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Theory] - [InlineData(false, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - [InlineData(true, "Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")] - public async Task AzureOpenAITestAsync(bool useChatModel, string prompt, string expectedAnswerContains) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(builder); - } - else - { - this.ConfigureAzureOpenAI(builder); - } - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, 
USA?", "Resilience event occurred")] - public async Task OpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) - { - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - this._kernelBuilder - .AddOpenAITextGeneration( - serviceId: openAIConfiguration.ServiceId, - modelId: openAIConfiguration.ModelId, - apiKey: "INVALID_KEY"); // Use an invalid API key to force a 401 Unauthorized response - this._kernelBuilder.Services.ConfigureHttpClientDefaults(c => - { - // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example - c.AddStandardResilienceHandler().Configure(o => - { - o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); - }); - }); - Kernel target = this._kernelBuilder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); - - // Assert - Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); - } - - // If the test fails, please note that SK retry logic may not be fully integrated into the underlying code using Azure SDK - [Theory] - [InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Resilience event occurred")] - public async Task AzureOpenAIHttpRetryPolicyTestAsync(string prompt, string expectedOutput) - { - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - IKernelBuilder builder = this._kernelBuilder; - - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - // Use an invalid API key to force a 401 Unauthorized response - 
builder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "INVALID_KEY"); - - builder.Services.ConfigureHttpClientDefaults(c => - { - // Use a standard resiliency policy, augmented to retry on 401 Unauthorized for this example - c.AddStandardResilienceHandler().Configure(o => - { - o.Retry.ShouldHandle = args => ValueTask.FromResult(args.Outcome.Result?.StatusCode is HttpStatusCode.Unauthorized); - }); - }); - - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = prompt })); - - // Assert - Assert.Contains(expectedOutput, this._testOutputHelper.GetLogs(), StringComparison.OrdinalIgnoreCase); - } - - [Theory] - [InlineData(false)] - [InlineData(true)] - public async Task AzureOpenAIShouldReturnMetadataAsync(bool useChatModel) - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - - if (useChatModel) - { - this.ConfigureAzureOpenAIChatAsText(this._kernelBuilder); - } - else - { - this.ConfigureAzureOpenAI(this._kernelBuilder); - } - - var kernel = this._kernelBuilder.Build(); - - var plugin = TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); - - // Act - var result = await kernel.InvokeAsync(plugin["FunPlugin"]["Limerick"]); - - // Assert - Assert.NotNull(result.Metadata); - - // Usage - Assert.True(result.Metadata.TryGetValue("Usage", out object? 
usageObject)); - Assert.NotNull(usageObject); - - var jsonObject = JsonSerializer.SerializeToElement(usageObject); - Assert.True(jsonObject.TryGetProperty("PromptTokens", out JsonElement promptTokensJson)); - Assert.True(promptTokensJson.TryGetInt32(out int promptTokens)); - Assert.NotEqual(0, promptTokens); - - Assert.True(jsonObject.TryGetProperty("CompletionTokens", out JsonElement completionTokensJson)); - Assert.True(completionTokensJson.TryGetInt32(out int completionTokens)); - Assert.NotEqual(0, completionTokens); - - // ContentFilterResults - Assert.True(result.Metadata.ContainsKey("ContentFilterResults")); - } - - [Fact] - public async Task OpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() - { - // Arrange - OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - Assert.NotNull(openAIConfiguration); - - // Use an invalid API key to force a 401 Unauthorized response - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder - .AddOpenAITextGeneration( - modelId: openAIConfiguration.ModelId, - apiKey: "INVALID_KEY", - serviceId: openAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); - - Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); - } - - [Fact] - public async Task AzureOpenAIHttpInvalidKeyShouldReturnErrorDetailAsync() - { - // Arrange - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - Kernel target = this._kernelBuilder - .AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: 
azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "INVALID_KEY", - serviceId: azureOpenAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act and Assert - var ex = await Assert.ThrowsAsync(() => target.InvokeAsync(plugins["SummarizePlugin"]["Summarize"], new() { [InputParameterName] = "Any" })); - - Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)ex).StatusCode); - } - - [Fact] - public async Task AzureOpenAIHttpExceededMaxTokensShouldReturnErrorDetailAsync() - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - // Arrange - this._kernelBuilder.Services.AddSingleton(this._testOutputHelper); - Kernel target = this._kernelBuilder - .AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId) - .Build(); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "SummarizePlugin"); - - // Act - // Assert - await Assert.ThrowsAsync(() => plugins["SummarizePlugin"]["Summarize"].InvokeAsync(target, new() { [InputParameterName] = string.Join('.', Enumerable.Range(1, 40000)) })); - } - - [Theory(Skip = "This test is for manual verification.")] - [InlineData("\n", AIServiceType.OpenAI)] - [InlineData("\r\n", AIServiceType.OpenAI)] - [InlineData("\n", AIServiceType.AzureOpenAI)] - [InlineData("\r\n", AIServiceType.AzureOpenAI)] - public async Task CompletionWithDifferentLineEndingsAsync(string lineEnding, AIServiceType service) - { - // Arrange - var prompt = - "Given a json input and a request. Apply the request on the json input and return the result. 
" + - $"Put the result in between tags{lineEnding}" + - $$"""Input:{{lineEnding}}{"name": "John", "age": 30}{{lineEnding}}{{lineEnding}}Request:{{lineEnding}}name"""; - - const string ExpectedAnswerContains = "John"; - - this._kernelBuilder.Services.AddSingleton(this._logger); - Kernel target = this._kernelBuilder.Build(); - - this._serviceConfiguration[service](target); - - IReadOnlyKernelPluginCollection plugins = TestHelpers.ImportSamplePlugins(target, "ChatPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }); - - // Assert - Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task AzureOpenAIInvokePromptTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - - // Act - FunctionResult actual = await target.InvokePromptAsync(prompt, new(new OpenAIPromptExecutionSettings() { MaxTokens = 150 })); - - // Assert - Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase); - Assert.NotNull(actual.Metadata); - } - - [Fact] - public async Task AzureOpenAIInvokePromptWithMultipleResultsTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - - var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 150, ResultsPerPrompt = 3 }; - - // Act - FunctionResult actual = await target.InvokePromptAsync(prompt, new(executionSettings)); - - // Assert - Assert.Null(actual.Metadata); - - var chatMessageContents = 
actual.GetValue>(); - - Assert.NotNull(chatMessageContents); - Assert.Equal(executionSettings.ResultsPerPrompt, chatMessageContents.Count); - - foreach (var chatMessageContent in chatMessageContents) - { - Assert.NotNull(chatMessageContent.Metadata); - Assert.Contains("Pike Place", chatMessageContent.Content, StringComparison.OrdinalIgnoreCase); - } - } - - [Fact] - public async Task AzureOpenAIDefaultValueTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - Kernel target = builder.Build(); - - IReadOnlyKernelPluginCollection plugin = TestHelpers.ImportSamplePlugins(target, "FunPlugin"); - - // Act - FunctionResult actual = await target.InvokeAsync(plugin["FunPlugin"]["Limerick"]); - - // Assert - Assert.Contains("Bob", actual.GetValue(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task MultipleServiceLoadPromptConfigTestAsync() - { - // Arrange - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAI(builder); - this.ConfigureInvalidAzureOpenAI(builder); - - Kernel target = builder.Build(); - - var prompt = "Where is the most famous fish market in Seattle, Washington, USA?"; - var defaultPromptModel = new PromptTemplateConfig(prompt) { Name = "FishMarket1" }; - var azurePromptModel = PromptTemplateConfig.FromJson(""" - { - "name": "FishMarket2", - "execution_settings": { - "azure-gpt-35-turbo-instruct": { - "max_tokens": 256 - } - } - } - """); - azurePromptModel.Template = prompt; - - var defaultFunc = target.CreateFunctionFromPrompt(defaultPromptModel); - var azureFunc = target.CreateFunctionFromPrompt(azurePromptModel); - - // Act - await Assert.ThrowsAsync(() => target.InvokeAsync(defaultFunc)); - - FunctionResult azureResult = await target.InvokeAsync(azureFunc); - - // Assert - Assert.Contains("Pike Place", azureResult.GetValue(), 
StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task ChatSystemPromptIsNotIgnoredAsync() - { - // Arrange - var settings = new OpenAIPromptExecutionSettings { ChatSystemPrompt = "Reply \"I don't know\" to every question." }; - - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?", new(settings)); - - // Assert - Assert.Contains("I don't know", result.ToString(), StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task SemanticKernelVersionHeaderIsSentAsync() - { - // Arrange - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - using var defaultHandler = new HttpClientHandler(); - using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler); - using var httpClient = new HttpClient(httpHeaderHandler); - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - builder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId, - httpClient: httpClient); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?"); - - // Assert - Assert.NotNull(httpHeaderHandler.RequestHeaders); - 
Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values)); - } - - [Theory(Skip = "This test is for manual verification.")] - [InlineData(null, null)] - [InlineData(false, null)] - [InlineData(true, 2)] - [InlineData(true, 5)] - public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs) - { - // Arrange - var settings = new OpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs }; - - this._kernelBuilder.Services.AddSingleton(this._logger); - var builder = this._kernelBuilder; - this.ConfigureAzureOpenAIChatAsText(builder); - Kernel target = builder.Build(); - - // Act - var result = await target.InvokePromptAsync("Hi, can you help me today?", new(settings)); - - var logProbabilityInfo = result.Metadata?["LogProbabilityInfo"] as ChatChoiceLogProbabilityInfo; - - // Assert - if (logprobs is true) - { - Assert.NotNull(logProbabilityInfo); - Assert.Equal(topLogprobs, logProbabilityInfo.TokenLogProbabilityResults[0].TopLogProbabilityEntries.Count); - } - else - { - Assert.Null(logProbabilityInfo); - } - } - - #region internals - - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - private readonly Dictionary> _serviceConfiguration = []; - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - - private void ConfigureChatOpenAI(IKernelBuilder kernelBuilder) - { - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - - Assert.NotNull(openAIConfiguration); - Assert.NotNull(openAIConfiguration.ChatModelId); - Assert.NotNull(openAIConfiguration.ApiKey); - Assert.NotNull(openAIConfiguration.ServiceId); - - kernelBuilder.AddOpenAIChatCompletion( - modelId: openAIConfiguration.ChatModelId, - apiKey: openAIConfiguration.ApiKey, - serviceId: openAIConfiguration.ServiceId); - } - - private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) - 
{ - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - } - private void ConfigureInvalidAzureOpenAI(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: "invalid-api-key", - serviceId: $"invalid-{azureOpenAIConfiguration.ServiceId}"); - } - - private void ConfigureAzureOpenAIChatAsText(IKernelBuilder kernelBuilder) - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - kernelBuilder.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, - serviceId: azureOpenAIConfiguration.ServiceId); - 
} - - private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler) - { - public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - this.RequestHeaders = request.Headers; - return await base.SendAsync(request, cancellationToken); - } - } - - #endregion -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs deleted file mode 100644 index 30b0c3d1115b..000000000000 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs +++ /dev/null @@ -1,156 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.SemanticKernel; -using Microsoft.SemanticKernel.Connectors.OpenAI; -using SemanticKernel.IntegrationTests.TestSettings; -using Xunit; -using Xunit.Abstractions; - -namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; - -#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only. - -public sealed class OpenAIFileServiceTests(ITestOutputHelper output) : IDisposable -{ - private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) - .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables() - .AddUserSecrets() - .Build(); - - [Theory(Skip = "OpenAI will often throttle requests. 
This test is for manual verification.")] - [InlineData("test_image_001.jpg", "image/jpeg")] - [InlineData("test_content.txt", "text/plain")] - public async Task OpenAIFileServiceLifecycleAsync(string fileName, string mimeType) - { - // Arrange - OpenAIFileService fileService = this.CreateOpenAIFileService(); - - // Act & Assert - await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType); - } - - [Theory] - [InlineData("test_image_001.jpg", "image/jpeg")] - [InlineData("test_content.txt", "text/plain")] - public async Task AzureOpenAIFileServiceLifecycleAsync(string fileName, string mimeType) - { - // Arrange - OpenAIFileService fileService = this.CreateOpenAIFileService(); - - // Act & Assert - await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType); - } - - private async Task VerifyFileServiceLifecycleAsync(OpenAIFileService fileService, string fileName, string mimeType) - { - // Setup file content - await using FileStream fileStream = File.OpenRead($"./TestData/{fileName}"); - BinaryData sourceData = await BinaryData.FromStreamAsync(fileStream); - BinaryContent sourceContent = new(sourceData.ToArray(), mimeType); - - // Upload file with unsupported purpose (failure case) - await Assert.ThrowsAsync(() => fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.AssistantsOutput))); - - // Upload file with wacky purpose (failure case) - await Assert.ThrowsAsync(() => fileService.UploadContentAsync(sourceContent, new(fileName, new OpenAIFilePurpose("pretend")))); - - // Upload file - OpenAIFileReference fileReference = await fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.FineTune)); - try - { - AssertFileReferenceEquals(fileReference, fileName, sourceData.Length, OpenAIFilePurpose.FineTune); - - // Retrieve files by different purpose - Dictionary fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.Assistants); - Assert.DoesNotContain(fileReference.Id, fileMap.Keys); - - 
// Retrieve files by wacky purpose (failure case) - await Assert.ThrowsAsync(() => GetFilesAsync(fileService, new OpenAIFilePurpose("pretend"))); - - // Retrieve files by expected purpose - fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.FineTune); - Assert.Contains(fileReference.Id, fileMap.Keys); - AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune); - - // Retrieve files by no specific purpose - fileMap = await GetFilesAsync(fileService); - Assert.Contains(fileReference.Id, fileMap.Keys); - AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune); - - // Retrieve file by id - OpenAIFileReference file = await fileService.GetFileAsync(fileReference.Id); - AssertFileReferenceEquals(file, fileName, sourceData.Length, OpenAIFilePurpose.FineTune); - - // Retrieve file content - BinaryContent retrievedContent = await fileService.GetFileContentAsync(fileReference.Id); - Assert.NotNull(retrievedContent.Data); - Assert.NotNull(retrievedContent.Uri); - Assert.NotNull(retrievedContent.Metadata); - Assert.Equal(fileReference.Id, retrievedContent.Metadata["id"]); - Assert.Equal(sourceContent.Data!.Value.Length, retrievedContent.Data.Value.Length); - } - finally - { - // Delete file - await fileService.DeleteFileAsync(fileReference.Id); - } - } - - private static void AssertFileReferenceEquals(OpenAIFileReference fileReference, string expectedFileName, int expectedSize, OpenAIFilePurpose expectedPurpose) - { - Assert.Equal(expectedFileName, fileReference.FileName); - Assert.Equal(expectedPurpose, fileReference.Purpose); - Assert.Equal(expectedSize, fileReference.SizeInBytes); - } - - private static async Task> GetFilesAsync(OpenAIFileService fileService, OpenAIFilePurpose? 
purpose = null) - { - IEnumerable files = await fileService.GetFilesAsync(purpose); - Dictionary fileIds = files.DistinctBy(f => f.Id).ToDictionary(f => f.Id); - return fileIds; - } - - #region internals - - private readonly XunitLogger _logger = new(output); - private readonly RedirectOutput _testOutputHelper = new(output); - - public void Dispose() - { - this._logger.Dispose(); - this._testOutputHelper.Dispose(); - } - - private OpenAIFileService CreateOpenAIFileService() - { - var openAIConfiguration = this._configuration.GetSection("OpenAI").Get(); - - Assert.NotNull(openAIConfiguration); - Assert.NotNull(openAIConfiguration.ApiKey); - Assert.NotNull(openAIConfiguration.ServiceId); - - return new(openAIConfiguration.ApiKey, openAIConfiguration.ServiceId, loggerFactory: this._logger); - } - - private OpenAIFileService CreateAzureOpenAIFileService() - { - var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); - - Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); - Assert.NotNull(azureOpenAIConfiguration.ServiceId); - - return new(new Uri(azureOpenAIConfiguration.Endpoint), azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.ServiceId, loggerFactory: this._logger); - } - - #endregion -} diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs index 74f63fa3fabd..bccc92bfa0f3 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextEmbeddingTests.cs @@ -13,7 +13,7 @@ public sealed class OpenAITextEmbeddingTests { private const int AdaVectorLength = 1536; private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: 
"testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() @@ -60,49 +60,4 @@ public async Task OpenAIWithDimensionsAsync(int? dimensions, int expectedVectorL // Assert Assert.Equal(expectedVectorLength, result.Length); } - - [Theory] - [InlineData("test sentence")] - public async Task AzureOpenAITestAsync(string testInputString) - { - // Arrange - AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService(azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey); - - // Act - var singleResult = await embeddingGenerator.GenerateEmbeddingAsync(testInputString); - var batchResult = await embeddingGenerator.GenerateEmbeddingsAsync([testInputString, testInputString, testInputString]); - - // Assert - Assert.Equal(AdaVectorLength, singleResult.Length); - Assert.Equal(3, batchResult.Count); - } - - [Theory] - [InlineData(null, 3072)] - [InlineData(1024, 1024)] - public async Task AzureOpenAIWithDimensionsAsync(int? dimensions, int expectedVectorLength) - { - // Arrange - const string TestInputString = "test sentence"; - - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAIEmbeddings").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var embeddingGenerator = new AzureOpenAITextEmbeddingGenerationService( - "text-embedding-3-large", - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey, - dimensions: dimensions); - - // Act - var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString); - - // Assert - Assert.Equal(expectedVectorLength, result.Length); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs index e35c357cf375..c2818abe2502 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs @@ -12,13 +12,13 @@ namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAITextToAudioTests { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .AddUserSecrets() .Build(); - [Fact(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] + [Fact]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")] public async Task OpenAITextToAudioTestAsync() { // Arrange @@ -38,28 +38,4 @@ public async Task OpenAITextToAudioTestAsync() var audioData = result.Data!.Value; Assert.False(audioData.IsEmpty); } - - [Fact] - public async Task AzureOpenAITextToAudioTestAsync() - { - // Arrange - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToAudio").Get(); - Assert.NotNull(azureOpenAIConfiguration); - - var kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextToAudio( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) - .Build(); - - var service = kernel.GetRequiredService(); - - // Act - var result = await service.GetAudioContentAsync("The sun rises in the east and sets in the west."); - - // Assert - var audioData = result.Data!.Value; - Assert.False(audioData.IsEmpty); - } } diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs index e133f91ee547..07524b592973 100644 --- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs +++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs @@ -3,83 +3,86 @@ using System.Threading.Tasks; using Microsoft.Extensions.Configuration; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextToImage; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; +#pragma warning disable CS0618 // Type or member is obsolete + namespace SemanticKernel.IntegrationTests.Connectors.OpenAI; public sealed class OpenAITextToImageTests { private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() - .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); - [Fact(Skip = "This test is for manual verification.")] - public async Task OpenAITextToImageTestAsync() + [Theory(Skip = "This test is for manual verification.")] + [InlineData("dall-e-2", 512, 
512)] + [InlineData("dall-e-3", 1024, 1024)] + public async Task OpenAITextToImageByModelTestAsync(string modelId, int width, int height) { // Arrange OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get(); Assert.NotNull(openAIConfiguration); var kernel = Kernel.CreateBuilder() - .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey) + .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: modelId) .Build(); var service = kernel.GetRequiredService(); // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 512, 512); + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", width, height); // Assert Assert.NotNull(result); Assert.NotEmpty(result); } - [Fact(Skip = "This test is for manual verification.")] - public async Task OpenAITextToImageByModelTestAsync() + [Fact] + public async Task OpenAITextToImageUseDallE2ByDefaultAsync() { // Arrange OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get(); Assert.NotNull(openAIConfiguration); var kernel = Kernel.CreateBuilder() - .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: openAIConfiguration.ModelId) + .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey) .Build(); var service = kernel.GetRequiredService(); // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); + var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 256, 256); // Assert Assert.NotNull(result); Assert.NotEmpty(result); } - [Fact(Skip = "This test is for manual verification.")] - public async Task AzureOpenAITextToImageTestAsync() + [Fact] + public async Task OpenAITextToImageDalle3GetImagesTestAsync() { // Arrange - AzureOpenAIConfiguration? 
azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToImage").Get(); - Assert.NotNull(azureOpenAIConfiguration); + OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get(); + Assert.NotNull(openAIConfiguration); var kernel = Kernel.CreateBuilder() - .AddAzureOpenAITextToImage( - azureOpenAIConfiguration.DeploymentName, - azureOpenAIConfiguration.Endpoint, - azureOpenAIConfiguration.ApiKey) + .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: "dall-e-3") .Build(); var service = kernel.GetRequiredService(); // Act - var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024); + var result = await service.GetImageContentsAsync("The sun rises in the east and sets in the west.", new OpenAITextToImageExecutionSettings { Size = (1024, 1024) }); // Assert Assert.NotNull(result); Assert.NotEmpty(result); + Assert.NotEmpty(result[0].Uri!.ToString()); } } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json new file mode 100644 index 000000000000..1a85a5330b24 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesStreamingTest.json @@ -0,0 +1,21 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + }, + { + "content": "Sure! 
The time in Seattle is currently 3:00 PM.", + "role": "assistant" + }, + { + "content": "What about New York?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json index 397d351c0f50..959c4f62fe15 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithChatRolesTest.json @@ -13,10 +13,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithComplexObjectsTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff 
--git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json new file mode 100644 index 000000000000..f9472d3f2da0 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json @@ -0,0 +1,17 @@ +{ + "messages": [ + { + "content": "The current time is Sun, 04 Jun 1989 12:11:13 GMT", + "role": "system" + }, + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json index 571ddbcd55c6..cc0b8acb9f2e 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithHelperFunctionsTest.json @@ -9,10 +9,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json 
b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/PromptWithSimpleVariableTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json new file mode 100644 index 000000000000..02f714872433 --- /dev/null +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptStreamingTest.json @@ -0,0 +1,13 @@ +{ + "messages": [ + { + "content": "Can you help me tell the time in Seattle right now?", + "role": "user" + } + ], + "model": "Dummy", + "stream": true, + "stream_options": { + "include_usage": true + } +} \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json index 8445e850bbb4..8d23881d66ff 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json +++ b/dotnet/src/IntegrationTests/CrossLanguage/Data/SimplePromptTest.json @@ -5,10 +5,5 @@ "role": "user" } ], - "temperature": 1, - "top_p": 1, - "n": 1, - "presence_penalty": 0, - "frequency_penalty": 0, "model": "Dummy" } \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs b/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs index bbc55dfabfda..1621ffdfbfa8 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/KernelRequestTracer.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; +using System.IO; using System.Net; using System.Net.Http; using System.Text; @@ -39,6 +40,7 @@ internal sealed class KernelRequestTracer : IDisposable ] }"; + private MemoryStream? _memoryDummyResponse; private HttpClient? _httpClient; private HttpMessageHandlerStub? _httpMessageHandlerStub; @@ -134,17 +136,17 @@ private void DisposeHttpResources() { this._httpClient?.Dispose(); this._httpMessageHandlerStub?.Dispose(); + this._memoryDummyResponse?.Dispose(); } private void ResetHttpComponents() { this.DisposeHttpResources(); - + this._memoryDummyResponse = new MemoryStream(Encoding.UTF8.GetBytes(DummyResponse)); this._httpMessageHandlerStub = new HttpMessageHandlerStub(); this._httpMessageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK) { - Content = new StringContent(DummyResponse, - Encoding.UTF8, "application/json") + Content = new StreamContent(this._memoryDummyResponse) }; this._httpClient = new HttpClient(this._httpMessageHandlerStub); } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs index 1e43ec9a4f93..fe12882d2dca 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithChatRolesTest.cs @@ -30,15 +30,13 @@ public async Task PromptWithChatRolesAsync(bool isInline, bool isStreaming, stri JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithChatRolesTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithChatRolesStreamingTest.json" + : "./CrossLanguage/Data/PromptWithChatRolesTest.json"); + JsonNode? 
expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); - if (isStreaming) - { - expectedObject["stream"] = true; - } - Assert.True(JsonNode.DeepEquals(obtainedObject, expectedObject)); } } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs index 87fb3e1c888d..b8a9a9b275ea 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs @@ -39,15 +39,13 @@ public async Task PromptWithComplexObjectsAsync(bool isInline, bool isStreaming, JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithComplexObjectsTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithComplexObjectsStreamingTest.json" + : "./CrossLanguage/Data/PromptWithComplexObjectsTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); - if (isStreaming) - { - expectedObject["stream"] = true; - } - Assert.True(JsonNode.DeepEquals(obtainedObject, expectedObject)); } } diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs index 12d7166e0bb5..ab192c2429cc 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs @@ -37,7 +37,10 @@ public async Task PromptWithHelperFunctionsAsync(bool isInline, bool isStreaming JsonNode? 
obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithHelperFunctionsTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithHelperFunctionsStreamingTest.json" + : "./CrossLanguage/Data/PromptWithHelperFunctionsTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs index 80fa3bd5ae3e..af23d6b462ea 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithSimpleVariableTest.cs @@ -34,7 +34,10 @@ public async Task PromptWithSimpleVariableAsync(bool isInline, bool isStreaming, JsonNode? obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/PromptWithSimpleVariableTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json" + : "./CrossLanguage/Data/PromptWithSimpleVariableTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs index d9cfa268ca49..46580dce8135 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/SimplePromptTest.cs @@ -30,7 +30,10 @@ public async Task SimplePromptAsync(bool isInline, bool isStreaming, string temp JsonNode? 
obtainedObject = JsonNode.Parse(requestContent); Assert.NotNull(obtainedObject); - string expected = await File.ReadAllTextAsync("./CrossLanguage/Data/SimplePromptTest.json"); + string expected = await File.ReadAllTextAsync(isStreaming + ? "./CrossLanguage/Data/SimplePromptStreamingTest.json" + : "./CrossLanguage/Data/SimplePromptTest.json"); + JsonNode? expectedObject = JsonNode.Parse(expected); Assert.NotNull(expectedObject); diff --git a/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs index 084bcefbfd5f..8b0805165437 100644 --- a/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs +++ b/dotnet/src/IntegrationTests/CrossLanguage/YamlPromptTest.cs @@ -13,11 +13,11 @@ public class YamlPromptTest { [Theory] [InlineData(false, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptTest.json")] - [InlineData(true, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptTest.json")] + [InlineData(true, "./CrossLanguage/Data/SimplePromptTest.yaml", "./CrossLanguage/Data/SimplePromptStreamingTest.json")] [InlineData(false, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesTest.json")] - [InlineData(true, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesTest.json")] + [InlineData(true, "./CrossLanguage/Data/PromptWithChatRolesTest-HB.yaml", "./CrossLanguage/Data/PromptWithChatRolesStreamingTest.json")] [InlineData(false, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableTest.json")] - [InlineData(true, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableTest.json")] + [InlineData(true, "./CrossLanguage/Data/PromptWithSimpleVariableTest.yaml", "./CrossLanguage/Data/PromptWithSimpleVariableStreamingTest.json")] public async Task 
YamlPromptAsync(bool isStreaming, string promptPath, string expectedResultPath) { using var kernelProvider = new KernelRequestTracer(); diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj index 55a6ac6d1006..a7508d32e0d1 100644 --- a/dotnet/src/IntegrationTests/IntegrationTests.csproj +++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj @@ -5,7 +5,7 @@ net8.0 true false - $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0110 + $(NoWarn);CA2007,CA1861,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0080,SKEXP0110 b7762d10-e29b-4bb1-8b74-b6d69a667dd4 @@ -18,16 +18,22 @@ + + + + + + @@ -60,15 +66,17 @@ + + + - @@ -79,9 +87,13 @@ + + + + - + @@ -103,24 +115,39 @@ PreserveNewest + + PreserveNewest + PreserveNewest PreserveNewest + + PreserveNewest + PreserveNewest + + PreserveNewest + PreserveNewest + + PreserveNewest + PreserveNewest PreserveNewest + + PreserveNewest + PreserveNewest @@ -156,6 +183,10 @@ + + + + Always diff --git a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs index e87bbc8d4813..b7859de35937 100644 --- a/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/Handlebars/HandlebarsPlannerTests.cs @@ -3,6 +3,7 @@ using System; using System.ComponentModel; using System.Threading.Tasks; +using Azure.Identity; using Microsoft.Extensions.Configuration; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; @@ -16,13 +17,13 @@ namespace SemanticKernel.IntegrationTests.Planners.Handlebars; public sealed class HandlebarsPlannerTests { - [Theory] - [InlineData(true, "Write a joke and send it in an e-mail to Kai.", "SendEmail", "test")] - public async Task CreatePlanFunctionFlowAsync(bool useChatModel, 
string goal, string expectedFunction, string expectedPlugin) + [Theory(Skip = "This test is for manual verification.")] + [InlineData("Write a joke and send it in an e-mail to Kai.", "SendEmail", "test")] + public async Task CreatePlanFunctionFlowAsync(string goal, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + var kernel = this.InitializeKernel(useEmbeddings); kernel.ImportPluginFromType(expectedPlugin); TestHelpers.ImportSamplePlugins(kernel, "FunPlugin"); @@ -37,7 +38,7 @@ public async Task CreatePlanFunctionFlowAsync(bool useChatModel, string goal, st ); } - [RetryTheory] + [RetryTheory(Skip = "This test is for manual verification.")] [InlineData("Write a novel about software development that is 3 chapters long.", "NovelChapter", "WriterPlugin")] public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFunction, string expectedPlugin) { @@ -56,8 +57,8 @@ public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFuncti ); } - [Theory] - [InlineData(true, "List each property of the default Qux object.", "## Complex types", """ + [Theory(Skip = "This test is for manual verification.")] + [InlineData("List each property of the default Qux object.", "## Complex types", """ ### Qux: { "type": "Object", @@ -71,11 +72,11 @@ public async Task CreatePlanWithDefaultsAsync(string goal, string expectedFuncti } } """, "GetDefaultQux", "Foo")] - public async Task CreatePlanWithComplexTypesDefinitionsAsync(bool useChatModel, string goal, string expectedSectionHeader, string expectedTypeHeader, string expectedFunction, string expectedPlugin) + public async Task CreatePlanWithComplexTypesDefinitionsAsync(string goal, string expectedSectionHeader, string expectedTypeHeader, string expectedFunction, string expectedPlugin) { // Arrange bool useEmbeddings = false; - var kernel = this.InitializeKernel(useEmbeddings, useChatModel); + var 
kernel = this.InitializeKernel(useEmbeddings); kernel.ImportPluginFromObject(new Foo()); // Act @@ -103,7 +104,7 @@ public async Task CreatePlanWithComplexTypesDefinitionsAsync(bool useChatModel, ); } - private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = true) + private Kernel InitializeKernel(bool useEmbeddings = false) { AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); @@ -113,22 +114,11 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = IKernelBuilder builder = Kernel.CreateBuilder(); - if (useChatModel) - { - builder.Services.AddAzureOpenAIChatCompletion( - deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, - modelId: azureOpenAIConfiguration.ChatModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); - } - else - { - builder.Services.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, - endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); - } + builder.Services.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, + modelId: azureOpenAIConfiguration.ChatModelId, + endpoint: azureOpenAIConfiguration.Endpoint, + credentials: new AzureCliCredential()); if (useEmbeddings) { @@ -136,7 +126,7 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = deploymentName: azureOpenAIEmbeddingsConfiguration.DeploymentName, modelId: azureOpenAIEmbeddingsConfiguration.EmbeddingModelId, endpoint: azureOpenAIEmbeddingsConfiguration.Endpoint, - apiKey: azureOpenAIEmbeddingsConfiguration.ApiKey); + credential: new AzureCliCredential()); } return builder.Build(); diff --git a/dotnet/src/IntegrationTests/Planners/PlanTests.cs b/dotnet/src/IntegrationTests/Planners/PlanTests.cs index 
df329d068085..c496b3488a78 100644 --- a/dotnet/src/IntegrationTests/Planners/PlanTests.cs +++ b/dotnet/src/IntegrationTests/Planners/PlanTests.cs @@ -569,14 +569,14 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = c.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); } else { c.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); } if (useEmbeddings) diff --git a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs index da15ecf935ee..679df8afe3c1 100644 --- a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs +++ b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlanParserTests.cs @@ -34,7 +34,7 @@ public void CanCallToPlanFromXml() .WithAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, + credentials: new AzureCliCredential(), serviceId: azureOpenAIConfiguration.ServiceId) .Build(); kernel.ImportPluginFromType("email"); diff --git a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs index 7eef3864cf8c..ecddc781a049 100644 --- a/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/SequentialPlanner/SequentialPlannerTests.cs @@ -121,14 +121,14 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = 
builder.Services.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); } else { builder.Services.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); } if (useEmbeddings) diff --git a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs index f4e743fdb989..9854e63b28ce 100644 --- a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/FunctionCallingStepwisePlannerTests.cs @@ -75,7 +75,7 @@ private Kernel InitializeKernel(bool useEmbeddings = false) .WithAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); if (useEmbeddings) { builder.WithAzureOpenAITextEmbeddingGeneration( diff --git a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs index 4a82dde5b7f2..99a79dab7577 100644 --- a/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs +++ b/dotnet/src/IntegrationTests/Planners/StepwisePlanner/StepwisePlannerTests.cs @@ -150,14 +150,14 @@ private Kernel InitializeKernel(bool useEmbeddings = false, bool useChatModel = builder.Services.AddAzureOpenAIChatCompletion( deploymentName: azureOpenAIConfiguration.ChatDeploymentName!, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + 
credentials: new AzureCliCredential()); } else { builder.Services.AddAzureOpenAITextGeneration( deploymentName: azureOpenAIConfiguration.DeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey); + credentials: new AzureCliCredential()); } if (useEmbeddings) diff --git a/dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json b/dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json index 1d5cc22bcbd3..ebf9f5e22c3f 100644 --- a/dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json +++ b/dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json @@ -6,9 +6,9 @@ "version": "1.0.0" }, "servers": [ - { - "url": "https://piercerepairsapi.azurewebsites.net/" - } + { + "url": "https://piercerepairsapi.azurewebsites.net" + } ], "paths": { "/repairs": { diff --git a/dotnet/src/IntegrationTests/Processes/ProcessCycleTests.cs b/dotnet/src/IntegrationTests/Processes/ProcessCycleTests.cs new file mode 100644 index 000000000000..6487b7534d14 --- /dev/null +++ b/dotnet/src/IntegrationTests/Processes/ProcessCycleTests.cs @@ -0,0 +1,172 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using System.Threading.Tasks; +using System; +using Microsoft.Extensions.Configuration; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; +using SemanticKernel.IntegrationTests.Agents; + +namespace SemanticKernel.IntegrationTests.Processes; + +public sealed class ProcessCycleTests +{ + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// Tests a process which cycles a fixed number of times and then exits. 
+ /// + /// A + [Fact] + public async Task TestCycleAndExitWithFanInAsync() + { + // Arrange + OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!; + this._kernelBuilder.AddOpenAIChatCompletion( + modelId: configuration.ModelId!, + apiKey: configuration.ApiKey); + + Kernel kernel = this._kernelBuilder.Build(); + + ProcessBuilder process = new("Test Process"); + + var kickoffStep = process.AddStepFromType(); + var myAStep = process.AddStepFromType(); + var myBStep = process.AddStepFromType(); + var myCStep = process.AddStepFromType(); + + process + .OnInputEvent(CommonEvents.StartProcess) + .SendEventTo(new ProcessFunctionTargetBuilder(kickoffStep)); + + kickoffStep + .OnEvent(CommonEvents.StartARequested) + .SendEventTo(new ProcessFunctionTargetBuilder(myAStep)); + + kickoffStep + .OnEvent(CommonEvents.StartBRequested) + .SendEventTo(new ProcessFunctionTargetBuilder(myBStep)); + + myAStep + .OnEvent(CommonEvents.AStepDone) + .SendEventTo(new ProcessFunctionTargetBuilder(myCStep, parameterName: "astepdata")); + + myBStep + .OnEvent(CommonEvents.BStepDone) + .SendEventTo(new ProcessFunctionTargetBuilder(myCStep, parameterName: "bstepdata")); + + myCStep + .OnEvent(CommonEvents.CStepDone) + .SendEventTo(new ProcessFunctionTargetBuilder(kickoffStep)); + + myCStep + .OnEvent(CommonEvents.ExitRequested) + .StopProcess(); + + KernelProcess kernelProcess = process.Build(); + + Console.WriteLine("starting"); + await kernelProcess.StartAsync(kernel, new KernelProcessEvent() { Id = CommonEvents.StartProcess, Data = "foo" }); + Console.WriteLine("finished"); + } + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + // These classes are dynamically instantiated by the processes used in tests. + + /// + /// Kick off step for the process. 
+ /// + private sealed class KickoffStep : KernelProcessStep + { + public static class Functions + { + public const string KickOff = nameof(KickOff); + } + + [KernelFunction(Functions.KickOff)] + public async ValueTask PrintWelcomeMessageAsync(KernelProcessStepContext context) + { + await context.EmitEventAsync(new() { Id = CommonEvents.StartARequested, Data = "Get Going A" }); + await context.EmitEventAsync(new() { Id = CommonEvents.StartBRequested, Data = "Get Going B" }); + } + } + + /// + /// A step in the process. + /// + private sealed class AStep : KernelProcessStep + { + [KernelFunction] + public async ValueTask DoItAsync(KernelProcessStepContext context) + { + await Task.Delay(TimeSpan.FromSeconds(1)); + await context.EmitEventAsync(new() { Id = CommonEvents.AStepDone }); + } + } + + /// + /// A step in the process. + /// + private sealed class BStep : KernelProcessStep + { + [KernelFunction] + public async ValueTask DoItAsync(KernelProcessStepContext context) + { + await Task.Delay(TimeSpan.FromSeconds(2)); + await context.EmitEventAsync(new() { Id = CommonEvents.BStepDone }); + } + } + + /// + /// A step in the process. + /// + private sealed class CStep : KernelProcessStep + { + private int CurrentCycle { get; set; } = 0; + + public CStep() + { + this.CurrentCycle = 0; + } + + [KernelFunction] + public async ValueTask DoItAsync(KernelProcessStepContext context, string astepdata, string bstepdata) + { + this.CurrentCycle++; + if (this.CurrentCycle == 3) + { + // Exit the processes + await context.EmitEventAsync(new() { Id = CommonEvents.ExitRequested }); + return; + } + + // Cycle back to the start + await context.EmitEventAsync(new() { Id = CommonEvents.CStepDone }); + } + } + + /// + /// Common Events used in the process. 
+ /// + private static class CommonEvents + { + public const string UserInputReceived = nameof(UserInputReceived); + public const string CompletionResponseGenerated = nameof(CompletionResponseGenerated); + public const string WelcomeDone = nameof(WelcomeDone); + public const string AStepDone = nameof(AStepDone); + public const string BStepDone = nameof(BStepDone); + public const string CStepDone = nameof(CStepDone); + public const string StartARequested = nameof(StartARequested); + public const string StartBRequested = nameof(StartBRequested); + public const string ExitRequested = nameof(ExitRequested); + public const string StartProcess = nameof(StartProcess); + } +#pragma warning restore CA1812 // Avoid uninstantiated internal classes +} diff --git a/dotnet/src/IntegrationTests/Processes/ProcessTests.cs b/dotnet/src/IntegrationTests/Processes/ProcessTests.cs new file mode 100644 index 000000000000..30dd878bcdbd --- /dev/null +++ b/dotnet/src/IntegrationTests/Processes/ProcessTests.cs @@ -0,0 +1,256 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.SemanticKernel; +using SemanticKernel.IntegrationTests.Agents; +using SemanticKernel.IntegrationTests.TestSettings; +using Xunit; + +namespace SemanticKernel.IntegrationTests.Processes; +public sealed class ProcessTests +{ + private readonly IKernelBuilder _kernelBuilder = Kernel.CreateBuilder(); + private readonly IConfigurationRoot _configuration = new ConfigurationBuilder() + .AddJsonFile(path: "testsettings.json", optional: true, reloadOnChange: true) + .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables() + .AddUserSecrets() + .Build(); + + /// + /// Tests a simple linear process with two steps and no sub processes. 
+ /// + /// A + [Fact] + public async Task LinearProcessAsync() + { + // Arrange + OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!; + this._kernelBuilder.AddOpenAIChatCompletion( + modelId: configuration.ModelId!, + apiKey: configuration.ApiKey); + + Kernel kernel = this._kernelBuilder.Build(); + var process = this.CreateLinearProcess("Simple").Build(); + + // Act + string testInput = "Test"; + var processHandle = await process.StartAsync(kernel, new() { Id = ProcessTestsEvents.StartProcess, Data = testInput }); + var processInfo = await processHandle.GetStateAsync(); + + // Assert + var repeatStepState = processInfo.Steps.Where(s => s.State.Name == nameof(RepeatStep)).FirstOrDefault()?.State as KernelProcessStepState; + Assert.NotNull(repeatStepState?.State); + Assert.Equal(string.Join(" ", Enumerable.Repeat(testInput, 2)), repeatStepState.State.LastMessage); + } + + /// + /// Tests a process with three steps where the third step is a nested process. Events from the outer process + /// are routed to the inner process. 
+ /// + /// A + [Fact] + public async Task NestedProcessOuterToInnerWorksAsync() + { + // Arrange + OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!; + this._kernelBuilder.AddOpenAIChatCompletion( + modelId: configuration.ModelId!, + apiKey: configuration.ApiKey); + + Kernel kernel = this._kernelBuilder.Build(); + + // Create the outer process + var processBuilder = this.CreateLinearProcess("Outer"); + + // Create the inner process and add it as a step to the outer process + var nestedProcessStep = processBuilder.AddStepFromProcess(this.CreateLinearProcess("Inner")); + + // Route the last step of the outer process to trigger the external event that starts the inner process + processBuilder.Steps[1].OnEvent(ProcessTestsEvents.OutputReadyInternal) + .SendEventTo(nestedProcessStep.WhereInputEventIs(ProcessTestsEvents.StartProcess)); + + // Build the outer process + var process = processBuilder.Build(); + + // Act + string testInput = "Test"; + var processHandle = await process.StartAsync(kernel, new() { Id = ProcessTestsEvents.StartProcess, Data = testInput }); + var processInfo = await processHandle.GetStateAsync(); + + // Assert + var innerProcess = processInfo.Steps.Where(s => s.State.Name == "Inner").Single() as KernelProcess; + Assert.NotNull(innerProcess); + var repeatStepState = innerProcess.Steps.Where(s => s.State.Name == nameof(RepeatStep)).Single().State as KernelProcessStepState; + Assert.NotNull(repeatStepState?.State); + Assert.Equal(string.Join(" ", Enumerable.Repeat(testInput, 4)), repeatStepState.State.LastMessage); + } + + /// + /// Tests a process with three steps where the third step is a nested process. Events from the inner process + /// are routed to the outer process. 
+ /// + /// A + [Fact] + public async Task NestedProcessInnerToOuterWorksWithPublicEventAsync() + { + // Arrange + OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!; + this._kernelBuilder.AddOpenAIChatCompletion( + modelId: configuration.ModelId!, + apiKey: configuration.ApiKey); + + Kernel kernel = this._kernelBuilder.Build(); + + // Create the outer process + var processBuilder = this.CreateLinearProcess("Outer"); + + // Create the inner process and add it as a step to the outer process + var nestedProcessStep = processBuilder.AddStepFromProcess(this.CreateLinearProcess("Inner")); + + // Add a new external event to start the outer process and handoff to the inner process directly + processBuilder.OnInputEvent(ProcessTestsEvents.StartInnerProcess) + .SendEventTo(nestedProcessStep.WhereInputEventIs(ProcessTestsEvents.StartProcess)); + + // Route the last step of the inner process to trigger the echo step of the outer process + nestedProcessStep.OnEvent(ProcessTestsEvents.OutputReadyPublic) + .SendEventTo(new ProcessFunctionTargetBuilder(processBuilder.Steps[0])); + + // Build the outer process + var process = processBuilder.Build(); + + // Act + string testInput = "Test"; + var processHandle = await process.StartAsync(kernel, new() { Id = ProcessTestsEvents.StartInnerProcess, Data = testInput }); + var processInfo = await processHandle.GetStateAsync(); + + // Assert + var repeatStepState = processInfo.Steps.Where(s => s.State.Name == nameof(RepeatStep)).FirstOrDefault()?.State as KernelProcessStepState; + Assert.NotNull(repeatStepState?.State); + Assert.Equal(string.Join(" ", Enumerable.Repeat(testInput, 4)), repeatStepState.State.LastMessage); + } + + /// + /// Tests a process with three steps where the third step is a nested process. Events from the inner process + /// are routed to the outer process. 
+ /// + /// A + [Fact] + public async Task NestedProcessInnerToOuterDoesNotWorkWithInternalEventAsync() + { + // Arrange + OpenAIConfiguration configuration = this._configuration.GetSection("OpenAI").Get()!; + this._kernelBuilder.AddOpenAIChatCompletion( + modelId: configuration.ModelId!, + apiKey: configuration.ApiKey); + + Kernel kernel = this._kernelBuilder.Build(); + + // Create the outer process + var processBuilder = this.CreateLinearProcess("Outer"); + + // Create the inner process and add it as a step to the outer process + var nestedProcessStep = processBuilder.AddStepFromProcess(this.CreateLinearProcess("Inner")); + + // Add a new external event to start the outer process and handoff to the inner process directly + processBuilder.OnInputEvent(ProcessTestsEvents.StartInnerProcess) + .SendEventTo(nestedProcessStep.WhereInputEventIs(ProcessTestsEvents.StartProcess)); + + // Route the last step of the inner process to trigger the echo step of the outer process + nestedProcessStep.OnEvent(ProcessTestsEvents.OutputReadyInternal) + .SendEventTo(new ProcessFunctionTargetBuilder(processBuilder.Steps[0])); + + // Build the outer process + var process = processBuilder.Build(); + + // Act + string testInput = "Test"; + var processHandle = await process.StartAsync(kernel, new() { Id = ProcessTestsEvents.StartInnerProcess, Data = testInput }); + var processInfo = await processHandle.GetStateAsync(); + + // Assert + var repeatStepState = processInfo.Steps.Where(s => s.State.Name == nameof(RepeatStep)).FirstOrDefault()?.State as KernelProcessStepState; + Assert.NotNull(repeatStepState); + Assert.Null(repeatStepState.State?.LastMessage); + } + + /// + /// Creates a simple linear process with two steps. 
+ /// + private ProcessBuilder CreateLinearProcess(string name) + { + var processBuilder = new ProcessBuilder(name); + var echoStep = processBuilder.AddStepFromType(); + var repeatStep = processBuilder.AddStepFromType(); + + processBuilder.OnInputEvent(ProcessTestsEvents.StartProcess) + .SendEventTo(new ProcessFunctionTargetBuilder(echoStep)); + + echoStep.OnFunctionResult(nameof(EchoStep.Echo)) + .SendEventTo(new ProcessFunctionTargetBuilder(repeatStep, parameterName: "message")); + + return processBuilder; + } + +#pragma warning disable CA1812 // Avoid uninstantiated internal classes + // These classes are dynamically instantiated by the processes used in tests. + + /// + /// A step that echos its input. + /// + private sealed class EchoStep : KernelProcessStep + { + [KernelFunction] + public string Echo(string message) => message; + } + + /// + /// A step that repeats its input. + /// + private sealed class RepeatStep : KernelProcessStep + { + private readonly StepState _state = new(); + + public override ValueTask ActivateAsync(KernelProcessStepState state) + { + state.State ??= this._state; + + return default; + } + + [KernelFunction] + public async Task RepeatAsync(string message, KernelProcessStepContext context, int count = 2) + { + var output = string.Join(" ", Enumerable.Repeat(message, count)); + this._state.LastMessage = output; + + // Emit the OnReady event with a public visibility and an internal visibility to aid in testing + await context.EmitEventAsync(new() { Id = ProcessTestsEvents.OutputReadyPublic, Data = output, Visibility = KernelProcessEventVisibility.Public }); + await context.EmitEventAsync(new() { Id = ProcessTestsEvents.OutputReadyInternal, Data = output, Visibility = KernelProcessEventVisibility.Internal }); + } + } + + /// + /// The state object for the repeat step. + /// + private sealed record StepState + { + public string? 
LastMessage { get; set; } + } + + /// + /// A class that defines the events that can be emitted by the chat bot process. This is + /// not required but used to ensure that the event names are consistent. + /// + private static class ProcessTestsEvents + { + public const string StartProcess = "StartProcess"; + public const string StartInnerProcess = "StartInnerProcess"; + public const string OutputReadyPublic = "OutputReadyPublic"; + public const string OutputReadyInternal = "OutputReadyInternal"; + } + +#pragma warning restore CA1812 // Avoid uninstantiated internal classes +} diff --git a/dotnet/src/IntegrationTests/PromptTests.cs b/dotnet/src/IntegrationTests/PromptTests.cs index 7b252713d24c..4435fa9a0133 100644 --- a/dotnet/src/IntegrationTests/PromptTests.cs +++ b/dotnet/src/IntegrationTests/PromptTests.cs @@ -4,12 +4,12 @@ using System.IO; using System.Reflection; using System.Threading.Tasks; +using Azure.Identity; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; -using SemanticKernel.IntegrationTests.Connectors.OpenAI; using SemanticKernel.IntegrationTests.TestSettings; using Xunit; using Xunit.Abstractions; @@ -27,7 +27,7 @@ public PromptTests(ITestOutputHelper output) .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true) .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() - .AddUserSecrets() + .AddUserSecrets() .Build(); this._kernelBuilder = Kernel.CreateBuilder(); @@ -76,16 +76,14 @@ private void ConfigureAzureOpenAI(IKernelBuilder kernelBuilder) var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get(); Assert.NotNull(azureOpenAIConfiguration); - Assert.NotNull(azureOpenAIConfiguration.DeploymentName); + Assert.NotNull(azureOpenAIConfiguration.ChatDeploymentName); 
Assert.NotNull(azureOpenAIConfiguration.Endpoint); - Assert.NotNull(azureOpenAIConfiguration.ApiKey); Assert.NotNull(azureOpenAIConfiguration.ServiceId); - kernelBuilder.AddAzureOpenAITextGeneration( - deploymentName: azureOpenAIConfiguration.DeploymentName, - modelId: azureOpenAIConfiguration.ModelId, + kernelBuilder.AddAzureOpenAIChatCompletion( + deploymentName: azureOpenAIConfiguration.ChatDeploymentName, endpoint: azureOpenAIConfiguration.Endpoint, - apiKey: azureOpenAIConfiguration.ApiKey, + credentials: new AzureCliCredential(), serviceId: azureOpenAIConfiguration.ServiceId); } #endregion diff --git a/dotnet/src/IntegrationTests/README.md b/dotnet/src/IntegrationTests/README.md index bc2234acda64..6227cdecb250 100644 --- a/dotnet/src/IntegrationTests/README.md +++ b/dotnet/src/IntegrationTests/README.md @@ -3,17 +3,29 @@ ## Requirements 1. **Azure OpenAI**: go to the [Azure OpenAI Quickstart](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart) - and deploy an instance of Azure OpenAI, deploy a model like "gpt-35-turbo-instruct" find your Endpoint and API key. -2. **OpenAI**: go to [OpenAI](https://platform.openai.com) to register and procure your API key. -3. **HuggingFace API key**: see https://huggingface.co/docs/huggingface_hub/guides/inference for details. -4. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api) + 1. Deploy the following models: + 1. `dall-e-3` DALL-E 3 generates images and is used in Text to Image tests. + 1. `tts` TTS is a model that converts text to natural sounding speech and is used in Text to Audio tests. + 1. `whisper` The Whisper models are trained for speech recognition and translation tasks and is used in Audio to Text tests. + 1. `text-embedding-ada-002` Text Embedding Ada 002 is used in Text Embedding tests. + 1. `gpt-35-turbo-instruct` GPT-3.5 Turbo Instruct is used in inference tests. + 1. 
`gpt-4o` GPT-4o is used in chat completion tests. + 1. Assign users who are running the integration tests the following roles: `Cognitive Services OpenAI Contributor` and `Cognitive Services OpenAI User` + 1. Users must [Authenticate to Azure using Azure CLI](https://learn.microsoft.com/en-us/cli/azure/authenticate-azure-cli) +1. **OpenAI**: go to [OpenAI](https://platform.openai.com) to register and procure your API key. +1. **HuggingFace API key**: see https://huggingface.co/docs/huggingface_hub/guides/inference for details. +1. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api) and select `Try Now` to get started. -5. **Oobabooga Text generation web UI**: Follow the [installation instructions](https://github.com/oobabooga/text-generation-webui#installation) to get a local Oobabooga instance running. Follow the [download instructions](https://github.com/oobabooga/text-generation-webui#downloading-models) to install a test model e.g. `python download-model.py gpt2`. Follow the [starting instructions](https://github.com/oobabooga/text-generation-webui#starting-the-web-ui) to start your local instance, enabling API, e.g. `python server.py --model gpt2 --listen --api --api-blocking-port "5000" --api-streaming-port "5005"`. Note that `--model` parameter is optional and models can be downloaded and hot swapped using exclusively the web UI, making it easy to test various models. -6. **Postgres**: start a postgres with the [pgvector](https://github.com/pgvector/pgvector) extension installed. You can easily do it using the docker image [ankane/pgvector](https://hub.docker.com/r/ankane/pgvector). -7. **Weaviate**: go to `IntegrationTests/Connectors/Weaviate` where `docker-compose.yml` is located and run `docker-compose up --build`. +1. **Postgres**: start a postgres with the [pgvector](https://github.com/pgvector/pgvector) extension installed. 
You can easily do it using the docker image [ankane/pgvector](https://hub.docker.com/r/ankane/pgvector). +1. **Weaviate**: go to `IntegrationTests/Connectors/Weaviate` where `docker-compose.yml` is located and run `docker-compose up --build`. ## Setup +> [!IMPORTANT] +> To run integration tests that depend on Azure OpenAI, you must have the Azure OpenAI models deployed and have the necessary permissions to access them. +> These test authenticate using [AzureCliCredential](https://learn.microsoft.com/en-us/dotnet/api/azure.identity.azureclicredential?view=azure-dotnet). +> Users must [Authenticate to Azure using Azure CLI](https://learn.microsoft.com/en-us/cli/azure/authenticate-azure-cli). + ### Option 1: Use Secret Manager Integration tests will require secrets and credentials, to access OpenAI, Azure OpenAI, @@ -39,31 +51,30 @@ dotnet user-secrets set "OpenAITextToImage:ServiceId" "dall-e-3" dotnet user-secrets set "OpenAITextToImage:ModelId" "dall-e-3" dotnet user-secrets set "OpenAITextToImage:ApiKey" "..." +dotnet user-secrets set "AzureAIInference:ServiceId" "azure-ai-inference" +dotnet user-secrets set "AzureAIInference:ApiKey" "..." +dotnet user-secrets set "AzureAIInference:Endpoint" "https://contoso.models.ai.azure.com/" + dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-gpt-35-turbo-instruct" dotnet user-secrets set "AzureOpenAI:DeploymentName" "gpt-35-turbo-instruct" dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4" dotnet user-secrets set "AzureOpenAI:Endpoint" "https://contoso.openai.azure.com/" -dotnet user-secrets set "AzureOpenAI:ApiKey" "..." dotnet user-secrets set "AzureOpenAIEmbeddings:ServiceId" "azure-text-embedding-ada-002" dotnet user-secrets set "AzureOpenAIEmbeddings:DeploymentName" "text-embedding-ada-002" dotnet user-secrets set "AzureOpenAIEmbeddings:Endpoint" "https://contoso.openai.azure.com/" -dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..." 
dotnet user-secrets set "AzureOpenAIAudioToText:ServiceId" "azure-audio-to-text" dotnet user-secrets set "AzureOpenAIAudioToText:DeploymentName" "whisper-1" dotnet user-secrets set "AzureOpenAIAudioToText:Endpoint" "https://contoso.openai.azure.com/" -dotnet user-secrets set "AzureOpenAIAudioToText:ApiKey" "..." dotnet user-secrets set "AzureOpenAITextToAudio:ServiceId" "azure-text-to-audio" dotnet user-secrets set "AzureOpenAITextToAudio:DeploymentName" "tts-1" dotnet user-secrets set "AzureOpenAITextToAudio:Endpoint" "https://contoso.openai.azure.com/" -dotnet user-secrets set "AzureOpenAITextToAudio:ApiKey" "..." dotnet user-secrets set "AzureOpenAITextToImage:ServiceId" "azure-text-to-image" dotnet user-secrets set "AzureOpenAITextToImage:DeploymentName" "dall-e-3" dotnet user-secrets set "AzureOpenAITextToImage:Endpoint" "https://contoso.openai.azure.com/" -dotnet user-secrets set "AzureOpenAITextToImage:ApiKey" "..." dotnet user-secrets set "MistralAI:ChatModel" "mistral-large-latest" dotnet user-secrets set "MistralAI:EmbeddingModel" "mistral-embed" diff --git a/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json b/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json new file mode 100644 index 000000000000..7da4cfe721d4 --- /dev/null +++ b/dotnet/src/IntegrationTests/TestData/serializedChatHistoryV1_15_1.json @@ -0,0 +1,125 @@ +๏ปฟ[ + { + "Role": { + "Label": "user" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "Given the current time of day and weather, what is the likely color of the sky in Boston?" 
+ } + ] + }, + { + "Role": { + "Label": "assistant" + }, + "Items": [ + { + "$type": "FunctionCallContent", + "Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ", + "PluginName": "HelperFunctions", + "FunctionName": "Get_Weather_For_City", + "Arguments": { + "cityName": "Boston" + } + } + ], + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5Qgx7xquKec3tc6lTn27y8Lmkz", + "Created": "2024-07-16T16:13:00+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 23, + "PromptTokens": 196, + "TotalTokens": 219 + }, + "ContentFilterResults": null, + "FinishReason": "tool_calls", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null, + "ChatResponseMessage.FunctionToolCalls": [ + { + "Name": "HelperFunctions-Get_Weather_For_City", + "Arguments": "{\n \u0022cityName\u0022: \u0022Boston\u0022\n}", + "Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + ] + } + }, + { + "Role": { + "Label": "tool" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "61 and rainy", + "Metadata": { + "ChatCompletionsToolCall.Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + }, + { + "$type": "FunctionResultContent", + "CallId": "call_q5FoU2fpfEyZmvC6iqtIXPYQ", + "PluginName": "HelperFunctions", + "FunctionName": "Get_Weather_For_City", + "Result": "61 and rainy" + } + ], + "Metadata": { + "ChatCompletionsToolCall.Id": "call_q5FoU2fpfEyZmvC6iqtIXPYQ" + } + }, + { + "Role": { + "Label": "assistant" + }, + "Items": [ + { + "$type": "TextContent", + "Text": "Given the current weather in Boston is 61\u00B0F and rainy, the likely color of the sky would be gray or overcast due to the presence of rain clouds.", + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5RibNr9h4bzq7JJjUXj6ITz7wN", + "Created": "2024-07-16T16:13:01+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 34, + "PromptTokens": 237, + "TotalTokens": 271 + }, + "ContentFilterResults": null, + "FinishReason": 
"stop", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null + } + } + ], + "ModelId": "gpt-4", + "Metadata": { + "Id": "chatcmpl-9lf5RibNr9h4bzq7JJjUXj6ITz7wN", + "Created": "2024-07-16T16:13:01+00:00", + "PromptFilterResults": [], + "SystemFingerprint": null, + "Usage": { + "CompletionTokens": 34, + "PromptTokens": 237, + "TotalTokens": 271 + }, + "ContentFilterResults": null, + "FinishReason": "stop", + "FinishDetails": null, + "LogProbabilityInfo": null, + "Index": 0, + "Enhancements": null + } + } +] \ No newline at end of file diff --git a/dotnet/src/IntegrationTests/TestHelpers.cs b/dotnet/src/IntegrationTests/TestHelpers.cs index e790aa1ca26b..5b42d2884377 100644 --- a/dotnet/src/IntegrationTests/TestHelpers.cs +++ b/dotnet/src/IntegrationTests/TestHelpers.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Reflection; using Microsoft.SemanticKernel; +using Xunit; namespace SemanticKernel.IntegrationTests; @@ -52,4 +53,13 @@ internal static IReadOnlyKernelPluginCollection ImportSamplePromptFunctions(Kern from pluginName in pluginNames select kernel.ImportPluginFromPromptDirectory(Path.Combine(parentDirectory, pluginName))); } + + internal static void AssertChatErrorExcuseMessage(string content) + { + string[] errors = ["error", "difficult", "unable"]; + + var matchesAny = errors.Any(e => content.Contains(e, StringComparison.InvariantCultureIgnoreCase)); + + Assert.True(matchesAny); + } } diff --git a/dotnet/src/IntegrationTests/TestSettings/AzureAIInferenceConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/AzureAIInferenceConfiguration.cs new file mode 100644 index 000000000000..664effc9e3a5 --- /dev/null +++ b/dotnet/src/IntegrationTests/TestSettings/AzureAIInferenceConfiguration.cs @@ -0,0 +1,15 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Diagnostics.CodeAnalysis; + +namespace SemanticKernel.IntegrationTests.TestSettings; + +[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", + Justification = "Configuration classes are instantiated through IConfiguration.")] +internal sealed class AzureAIInferenceConfiguration(Uri endpoint, string apiKey, string? serviceId = null) +{ + public Uri Endpoint { get; set; } = endpoint; + public string? ApiKey { get; set; } = apiKey; + public string? ServiceId { get; set; } = serviceId; +} diff --git a/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs index e530110f9322..21e76b25ebdb 100644 --- a/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs +++ b/dotnet/src/IntegrationTests/TestSettings/AzureOpenAIConfiguration.cs @@ -6,7 +6,7 @@ namespace SemanticKernel.IntegrationTests.TestSettings; [SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", Justification = "Configuration classes are instantiated through IConfiguration.")] -internal sealed class AzureOpenAIConfiguration(string serviceId, string deploymentName, string endpoint, string apiKey, string? chatDeploymentName = null, string? modelId = null, string? chatModelId = null, string? embeddingModelId = null) +internal sealed class AzureOpenAIConfiguration(string serviceId, string deploymentName, string endpoint, string? apiKey = null, string? chatDeploymentName = null, string? modelId = null, string? chatModelId = null, string? embeddingModelId = null) { public string ServiceId { get; set; } = serviceId; @@ -22,5 +22,5 @@ internal sealed class AzureOpenAIConfiguration(string serviceId, string deployme public string Endpoint { get; set; } = endpoint; - public string ApiKey { get; set; } = apiKey; + public string? 
ApiKey { get; set; } = apiKey; } diff --git a/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs new file mode 100644 index 000000000000..51e8d77eee0a --- /dev/null +++ b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs @@ -0,0 +1,13 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace SemanticKernel.IntegrationTests.TestSettings; + +[SuppressMessage("Performance", "CA1812:Internal class that is apparently never instantiated", + Justification = "Configuration classes are instantiated through IConfiguration.")] +internal sealed class OllamaConfiguration +{ + public string? ModelId { get; set; } + public string? Endpoint { get; set; } +} diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json index 66df73f8b7a5..e4bd00c302b6 100644 --- a/dotnet/src/IntegrationTests/testsettings.json +++ b/dotnet/src/IntegrationTests/testsettings.json @@ -2,15 +2,20 @@ "OpenAI": { "ServiceId": "gpt-3.5-turbo-instruct", "ModelId": "gpt-3.5-turbo-instruct", + "ChatModelId": "gpt-4o", "ApiKey": "" }, - "AzureOpenAI": { - "ServiceId": "azure-gpt-35-turbo-instruct", - "DeploymentName": "gpt-35-turbo-instruct", - "ChatDeploymentName": "gpt-4", + "AzureAIInference": { + "ServiceId": "azure-ai-inference", "Endpoint": "", "ApiKey": "" }, + "AzureOpenAI": { + "ServiceId": "azure-gpt", + "DeploymentName": "gpt-35-turbo-instruct", + "ChatDeploymentName": "gpt-4o", + "Endpoint": "" + }, "OpenAIEmbeddings": { "ServiceId": "text-embedding-ada-002", "ModelId": "text-embedding-ada-002", @@ -19,8 +24,7 @@ "AzureOpenAIEmbeddings": { "ServiceId": "azure-text-embedding-ada-002", "DeploymentName": "ada-002", - "Endpoint": "", - "ApiKey": "" + "Endpoint": "" }, "OpenAITextToAudio": { "ServiceId": "tts-1", @@ -30,8 +34,7 @@ "AzureOpenAITextToAudio": { "ServiceId": "azure-tts", "DeploymentName": "tts", - 
"Endpoint": "", - "ApiKey": "" + "Endpoint": "" }, "OpenAIAudioToText": { "ServiceId": "whisper-1", @@ -41,9 +44,18 @@ "AzureOpenAIAudioToText": { "ServiceId": "azure-whisper", "DeploymentName": "whisper", - "Endpoint": "", + "Endpoint": "" + }, + "OpenAITextToImage": { + "ServiceId": "dall-e-2", + "ModelId": "dall-e-2", "ApiKey": "" }, + "AzureOpenAITextToImage": { + "ServiceId": "azure-dalle3", + "DeploymentName": "Dalle3", + "Endpoint": "" + }, "HuggingFace": { "ApiKey": "" }, @@ -75,7 +87,10 @@ "ConnectionString": "", "VectorSearchCollection": "dotnetMSKNearestTest.nearestSearch" }, - "AzureCosmosDB": { + "AzureCosmosDBNoSQL": { + "ConnectionString": "" + }, + "AzureCosmosDBMongoDB": { "ConnectionString": "" }, "SqlServer": { diff --git a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallingUtilities.props b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallingUtilities.props new file mode 100644 index 000000000000..f731ecc3bae6 --- /dev/null +++ b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallingUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs new file mode 100644 index 000000000000..da5a1a8c808b --- /dev/null +++ b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs @@ -0,0 +1,340 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel.Connectors.FunctionCalling; + +/// +/// Class responsible for providing function calling configuration and processing AI function calls. As part of the processing, it will: +/// 1. Iterate over items representing AI model function calls in the collection. +/// 2. Look up each function in the . +/// 3. Invoke the auto function invocation filter, if registered, for each function. +/// 4. Invoke each function and add the function result to the . +/// +[ExcludeFromCodeCoverage] +internal sealed class FunctionCallsProcessor +{ + /// + /// The maximum number of auto-invokes that can be in-flight at any given time as part of the current + /// asynchronous chain of execution. + /// + /// + /// This is a fail-safe mechanism. If someone accidentally manages to set up execution settings in such a way that + /// auto-invocation is invoked recursively, and in particular where a prompt function is able to auto-invoke itself, + /// we could end up in an infinite loop. This const is a backstop against that happening. We should never come close + /// to this limit, but if we do, auto-invoke will be disabled for the current flow in order to prevent runaway execution. + /// With the current setup, the way this could possibly happen is if a prompt function is configured with built-in + /// execution settings that opt-in to auto-invocation of everything in the kernel, in which case the invocation of that + /// prompt function could advertise itself as a candidate for auto-invocation. We don't want to outright block that, + /// if that's something a developer has asked to do (e.g. 
it might be invoked with different arguments than its parent + /// was invoked with), but we do want to limit it. This limit is arbitrary and can be tweaked in the future and/or made + /// configurable should need arise. + /// + private const int MaxInflightAutoInvokes = 128; + + /// + /// The maximum number of function auto-invokes that can be made in a single user request. + /// + /// + /// After this number of iterations as part of a single user request is reached, auto-invocation + /// will be disabled. This is a safeguard against possible runaway execution if the model routinely re-requests + /// the same function over and over. + /// + private const int MaximumAutoInvokeAttempts = 128; + + /// Tracking for . + /// + /// It is temporarily made internal to allow code that uses the old function model to read it and decide whether to continue auto-invocation or not. + /// It should be made private when the old model is deprecated. + /// Despite the field being static, its value is unique per execution flow. So if thousands of requests hit it in parallel, each request will see its unique value. + /// + internal static readonly AsyncLocal s_inflightAutoInvokes = new(); + + /// + /// The logger. + /// + private readonly ILogger _logger; + + /// + /// Initializes a new instance of the class. + /// + /// The logger. + public FunctionCallsProcessor(ILogger? logger = null) + { + this._logger = logger ?? NullLogger.Instance; + } + + /// + /// Retrieves the configuration of the specified . + /// + /// The function choice behavior. + /// The chat history. + /// Request sequence index. + /// The . + /// The configuration of the specified . + public FunctionChoiceBehaviorConfiguration? GetConfiguration(FunctionChoiceBehavior? behavior, ChatHistory chatHistory, int requestIndex, Kernel? kernel) + { + // If no behavior is specified, return null. 
+ if (behavior is null) + { + return null; + } + + var configuration = behavior.GetConfiguration(new(chatHistory) { Kernel = kernel, RequestSequenceIndex = requestIndex }); + + // Disable auto invocation if no kernel is provided. + configuration.AutoInvoke = kernel is not null && configuration.AutoInvoke; + + // Disable auto invocation if we've exceeded the allowed auto-invoke limit. + int maximumAutoInvokeAttempts = configuration.AutoInvoke ? MaximumAutoInvokeAttempts : 0; + if (requestIndex >= maximumAutoInvokeAttempts) + { + configuration.AutoInvoke = false; + if (this._logger!.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Maximum auto-invoke ({MaximumAutoInvoke}) reached.", maximumAutoInvokeAttempts); + } + } + // Disable auto invocation if we've exceeded the allowed limit of in-flight auto-invokes. See XML comment for the "MaxInflightAutoInvokes" const for more details. + else if (s_inflightAutoInvokes.Value >= MaxInflightAutoInvokes) + { + configuration.AutoInvoke = false; + if (this._logger!.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Maximum auto-invoke ({MaxInflightAutoInvoke}) reached.", MaxInflightAutoInvokes); + } + } + + if (configuration.Functions?.Count == 0) + { + this._logger.LogDebug("No functions provided to AI model. Function calling is disabled."); + } + + return configuration; + } + + /// + /// Processes AI function calls by iterating over the function calls, invoking them and adding the results to the chat history. + /// + /// The chat message content representing AI model response and containing function calls. + /// The chat history to add function invocation results to. + /// AI model function(s) call request sequence index. + /// Callback to check if a function was advertised to AI model or not. + /// The . + /// The to monitor for cancellation requests. + /// Last chat history message if function invocation filter requested processing termination, otherwise null. 
+ public async Task ProcessFunctionCallsAsync( + ChatMessageContent chatMessageContent, + ChatHistory chatHistory, + int requestIndex, + Func checkIfFunctionAdvertised, + Kernel? kernel, + CancellationToken cancellationToken) + { + var functionCalls = FunctionCallContent.GetFunctionCalls(chatMessageContent).ToList(); + + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Function calls: {Calls}", functionCalls.Count); + } + if (this._logger.IsEnabled(LogLevel.Trace)) + { + var messages = new List(functionCalls.Count); + foreach (var call in functionCalls) + { + var argumentsString = call.Arguments is not null ? $"({string.Join(",", call.Arguments.Select(a => $"{a.Key}={a.Value}"))})" : "()"; + var pluginName = string.IsNullOrEmpty(call.PluginName) ? string.Empty : $"{call.PluginName}-"; + messages.Add($"{pluginName}{call.FunctionName}{argumentsString}"); + } + this._logger.LogTrace("Function calls: {Calls}", string.Join(", ", messages)); + } + + // Add the result message to the caller's chat history; + // this is required for AI model to understand the function results. + chatHistory.Add(chatMessageContent); + + // We must send back a result for every function call, regardless of whether we successfully executed it or not. + // If we successfully execute it, we'll add the result. If we don't, we'll add an error. + for (int functionCallIndex = 0; functionCallIndex < functionCalls.Count; functionCallIndex++) + { + FunctionCallContent functionCall = functionCalls[functionCallIndex]; + + // Check if the function call has an exception. + if (functionCall.Exception is not null) + { + this.AddFunctionCallResultToChatHistory(chatHistory, functionCall, result: null, errorMessage: $"Error: Function call processing failed. {functionCall.Exception.Message}"); + continue; + } + + // Make sure the requested function is one of the functions that was advertised to the AI model. 
+ if (!checkIfFunctionAdvertised(functionCall)) + { + this.AddFunctionCallResultToChatHistory(chatHistory, functionCall, result: null, errorMessage: "Error: Function call request for a function that wasn't defined."); + continue; + } + + // Look up the function in the kernel + if (!kernel!.Plugins.TryGetFunction(functionCall.PluginName, functionCall.FunctionName, out KernelFunction? function)) + { + this.AddFunctionCallResultToChatHistory(chatHistory, functionCall, result: null, errorMessage: "Error: Requested function could not be found."); + continue; + } + + // Prepare context for the auto function invocation filter and invoke it. + FunctionResult functionResult = new(function) { Culture = kernel.Culture }; + AutoFunctionInvocationContext invocationContext = new(kernel, function, functionResult, chatHistory, chatMessageContent) + { + Arguments = functionCall.Arguments, + RequestSequenceIndex = requestIndex, + FunctionSequenceIndex = functionCallIndex, + FunctionCount = functionCalls.Count + }; + + s_inflightAutoInvokes.Value++; + try + { + invocationContext = await OnAutoFunctionInvocationAsync(kernel, invocationContext, async (context) => + { + // Check if filter requested termination. + if (context.Terminate) + { + return; + } + + // Note that we explicitly do not use executionSettings here; those pertain to the all-up operation and not necessarily to any + // further calls made as part of this function invocation. In particular, we must not use function calling settings naively here, + // as the called function could in turn tell the model about itself as a possible candidate for invocation. 
+ context.Result = await function.InvokeAsync(kernel, invocationContext.Arguments, cancellationToken: cancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } +#pragma warning disable CA1031 // Do not catch general exception types + catch (Exception e) +#pragma warning restore CA1031 // Do not catch general exception types + { + this.AddFunctionCallResultToChatHistory(chatHistory, functionCall, result: null, errorMessage: $"Error: Exception while invoking function. {e.Message}"); + continue; + } + finally + { + s_inflightAutoInvokes.Value--; + } + + // Apply any changes from the auto function invocation filters context to final result. + functionResult = invocationContext.Result; + + object functionResultValue = functionResult.GetValue() ?? string.Empty; + + var result = ProcessFunctionResult(functionResultValue); + + this.AddFunctionCallResultToChatHistory(chatHistory, functionCall, result); + + // If filter requested termination, return last chat history message. + if (invocationContext.Terminate) + { + if (this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Filter requested termination of automatic function invocation."); + } + + return chatHistory.Last(); + } + } + + return null; + } + + /// + /// Adds the function call result or error message to the chat history. + /// + /// The chat history to add the function call result to. + /// The function call. + /// The function result to add to the chat history. + /// The error message to add to the chat history. + private void AddFunctionCallResultToChatHistory(ChatHistory chatHistory, FunctionCallContent functionCall, string? result, string? errorMessage = null) + { + // Log any error + if (errorMessage is not null && this._logger.IsEnabled(LogLevel.Debug)) + { + this._logger.LogDebug("Failed to handle function request ({Id}). {Error}", functionCall.Id, errorMessage); + } + + result ??= errorMessage ?? 
string.Empty; + + var message = new ChatMessageContent(role: AuthorRole.Tool, content: result); + message.Items.Add(new FunctionResultContent(functionCall.FunctionName, functionCall.PluginName, functionCall.Id, result)); + + chatHistory.Add(message); + } + + /// + /// Invokes the auto function invocation filters. + /// + /// The . + /// The auto function invocation context. + /// The function to call after the filters. + /// The auto function invocation context. + private static async Task OnAutoFunctionInvocationAsync( + Kernel kernel, + AutoFunctionInvocationContext context, + Func functionCallCallback) + { + await InvokeFilterOrFunctionAsync(kernel.AutoFunctionInvocationFilters, functionCallCallback, context).ConfigureAwait(false); + + return context; + } + + /// + /// This method will execute auto function invocation filters and function recursively. + /// If there are no registered filters, just function will be executed. + /// If there are registered filters, filter on position will be executed. + /// Second parameter of filter is callback. It can be either filter on + 1 position or function if there are no remaining filters to execute. + /// Function will be always executed as last step after all filters. + /// + private static async Task InvokeFilterOrFunctionAsync( + IList? autoFunctionInvocationFilters, + Func functionCallCallback, + AutoFunctionInvocationContext context, + int index = 0) + { + if (autoFunctionInvocationFilters is { Count: > 0 } && index < autoFunctionInvocationFilters.Count) + { + await autoFunctionInvocationFilters[index].OnAutoFunctionInvocationAsync(context, + (context) => InvokeFilterOrFunctionAsync(autoFunctionInvocationFilters, functionCallCallback, context, index + 1)).ConfigureAwait(false); + } + else + { + await functionCallCallback(context).ConfigureAwait(false); + } + } + + /// + /// Processes the function result. + /// + /// The result of the function call. + /// A string representation of the function result. 
+ public static string? ProcessFunctionResult(object functionResult) + { + if (functionResult is string stringResult) + { + return stringResult; + } + + // This is an optimization to use ChatMessageContent content directly + // without unnecessary serialization of the whole message content class. + if (functionResult is ChatMessageContent chatMessageContent) + { + return chatMessageContent.ToString(); + } + + return JsonSerializer.Serialize(functionResult); + } +} diff --git a/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs b/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs new file mode 100644 index 000000000000..feca5e79618c --- /dev/null +++ b/dotnet/src/InternalUtilities/openai/Extensions/ClientResultExceptionExtensions.cs @@ -0,0 +1,39 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.Diagnostics.CodeAnalysis; +using System.Net; +using Microsoft.SemanticKernel; + +/// +/// Provides extension methods for the class. +/// +[ExcludeFromCodeCoverage] +internal static class ClientResultExceptionExtensions +{ + /// + /// Converts a to an . + /// + /// The original . + /// An instance. + public static HttpOperationException ToHttpOperationException(this ClientResultException exception) + { + const int NoResponseReceived = 0; + + string? responseContent = null; + + try + { + responseContent = exception.GetRawResponse()?.Content.ToString(); + } +#pragma warning disable CA1031 // Do not catch general exception types + catch { } // We want to suppress any exceptions that occur while reading the content, ensuring that an HttpOperationException is thrown instead. +#pragma warning restore CA1031 + + return new HttpOperationException( + exception.Status == NoResponseReceived ? 
null : (HttpStatusCode?)exception.Status, + responseContent, + exception.Message, + exception); + } +} diff --git a/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props new file mode 100644 index 000000000000..e865b7fe40e9 --- /dev/null +++ b/dotnet/src/InternalUtilities/openai/OpenAIUtilities.props @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs new file mode 100644 index 000000000000..8ee5865edc2c --- /dev/null +++ b/dotnet/src/InternalUtilities/openai/Policies/GeneratedActionPipelinePolicy.cs @@ -0,0 +1,39 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading.Tasks; + +/// +/// Generic action pipeline policy for processing messages. 
+/// +[ExcludeFromCodeCoverage] +internal sealed class GenericActionPipelinePolicy : PipelinePolicy +{ + private readonly Action _processMessageAction; + + internal GenericActionPipelinePolicy(Action processMessageAction) + { + this._processMessageAction = processMessageAction; + } + + public override void Process(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) + { + this._processMessageAction(message); + if (currentIndex < pipeline.Count - 1) + { + pipeline[currentIndex + 1].Process(message, pipeline, currentIndex + 1); + } + } + + public override async ValueTask ProcessAsync(PipelineMessage message, IReadOnlyList pipeline, int currentIndex) + { + this._processMessageAction(message); + if (currentIndex < pipeline.Count - 1) + { + await pipeline[currentIndex + 1].ProcessAsync(message, pipeline, currentIndex + 1).ConfigureAwait(false); + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs new file mode 100644 index 000000000000..27bcf684b3be --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseAgentsTest.cs @@ -0,0 +1,131 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.Collections.ObjectModel; +using System.Diagnostics; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Files; + +/// +/// Base class for samples that demonstrate the usage of agents. +/// +public abstract class BaseAgentsTest(ITestOutputHelper output) : BaseTest(output) +{ + /// + /// Metadata key to indicate the assistant as created for a sample. + /// + protected const string AssistantSampleMetadataKey = "sksample"; + + /// + /// Metadata to indicate the assistant as created for a sample. 
+ /// + /// + /// While the samples do attempt to delete the assistants they create, it is possible + /// that some assistants may remain. This metadata can be used to identify sample + /// agents for clean-up. + /// + protected static readonly ReadOnlyDictionary AssistantSampleMetadata = + new(new Dictionary + { + { AssistantSampleMetadataKey, bool.TrueString } + }); + + /// + /// Provide a according to the configuration settings. + /// + protected OpenAIClientProvider GetClientProvider() + => + this.UseOpenAIConfig ? + OpenAIClientProvider.ForOpenAI(new ApiKeyCredential(this.ApiKey)) : + OpenAIClientProvider.ForAzureOpenAI(new ApiKeyCredential(this.ApiKey), new Uri(this.Endpoint!)); + + /// + /// Common method to write formatted agent chat content to the console. + /// + protected void WriteAgentChatMessage(ChatMessageContent message) + { + // Include ChatMessageContent.AuthorName in output, if present. + string authorExpression = message.Role == AuthorRole.User ? string.Empty : $" - {message.AuthorName ?? "*"}"; + // Include TextContent (via ChatMessageContent.Content), if present. + string contentExpression = string.IsNullOrWhiteSpace(message.Content) ? string.Empty : message.Content; + bool isCode = message.Metadata?.ContainsKey(OpenAIAssistantAgent.CodeInterpreterMetadataKey) ?? false; + string codeMarker = isCode ? "\n [CODE]\n" : " "; + Console.WriteLine($"\n# {message.Role}{authorExpression}:{codeMarker}{contentExpression}"); + + // Provide visibility for inner content (that isn't TextContent). + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + Console.WriteLine($" [{item.GetType().Name}] {annotation.Quote}: File #{annotation.FileId}"); + } + else if (item is FileReferenceContent fileReference) + { + Console.WriteLine($" [{item.GetType().Name}] File #{fileReference.FileId}"); + } + else if (item is ImageContent image) + { + Console.WriteLine($" [{item.GetType().Name}] {image.Uri?.ToString() ?? 
image.DataUri ?? $"{image.Data?.Length} bytes"}"); + } + else if (item is FunctionCallContent functionCall) + { + Console.WriteLine($" [{item.GetType().Name}] {functionCall.Id}"); + } + else if (item is FunctionResultContent functionResult) + { + Console.WriteLine($" [{item.GetType().Name}] {functionResult.CallId}"); + } + } + } + + protected async Task DownloadResponseContentAsync(OpenAIFileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is AnnotationContent annotation) + { + await this.DownloadFileContentAsync(client, annotation.FileId!); + } + } + } + + protected async Task DownloadResponseImageAsync(OpenAIFileClient client, ChatMessageContent message) + { + foreach (KernelContent item in message.Items) + { + if (item is FileReferenceContent fileReference) + { + await this.DownloadFileContentAsync(client, fileReference.FileId, launchViewer: true); + } + } + } + + private async Task DownloadFileContentAsync(OpenAIFileClient client, string fileId, bool launchViewer = false) + { + OpenAIFile fileInfo = client.GetFile(fileId); + if (fileInfo.Purpose == FilePurpose.AssistantsOutput) + { + string filePath = Path.Combine(Path.GetTempPath(), Path.GetFileName(fileInfo.Filename)); + if (launchViewer) + { + filePath = Path.ChangeExtension(filePath, ".png"); + } + + BinaryData content = await client.DownloadFileAsync(fileId); + File.WriteAllBytes(filePath, content.ToArray()); + Console.WriteLine($" File #{fileId} saved to: {filePath}"); + + if (launchViewer) + { + Process.Start( + new ProcessStartInfo + { + FileName = "cmd.exe", + Arguments = $"/C start {filePath}" + }); + } + } + } +} diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs index d71d3c1f0032..3b5c8841226f 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs +++ 
b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs @@ -1,11 +1,13 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System.Reflection; +using System.Text; using System.Text.Json; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; -public abstract class BaseTest +public abstract class BaseTest : TextWriter { /// /// Flag to force usage of OpenAI configuration if both @@ -58,7 +60,7 @@ protected Kernel CreateKernelWithChatCompletion() return builder.Build(); } - protected BaseTest(ITestOutputHelper output) + protected BaseTest(ITestOutputHelper output, bool redirectSystemConsoleOutput = false) { this.Output = output; this.LoggerFactory = new XunitLogger(output); @@ -70,37 +72,44 @@ protected BaseTest(ITestOutputHelper output) .Build(); TestConfiguration.Initialize(configRoot); + + // Redirect System.Console output to the test output if requested + if (redirectSystemConsoleOutput) + { + System.Console.SetOut(this); + } } - /// - /// This method can be substituted by Console.WriteLine when used in Console apps. - /// - /// Target object to write - public void WriteLine(object? target = null) - => this.Output.WriteLine(target ?? string.Empty); + /// + public override void WriteLine(object? value = null) + => this.Output.WriteLine(value ?? string.Empty); - /// - /// This method can be substituted by Console.WriteLine when used in Console apps. - /// - /// Format string - /// Arguments - public void WriteLine(string? format, params object?[] args) - => this.Output.WriteLine(format ?? string.Empty, args); + /// + public override void WriteLine(string? format, params object?[] arg) + => this.Output.WriteLine(format ?? string.Empty, arg); - /// - /// This method can be substituted by Console.WriteLine when used in Console apps. - /// - /// The message - public void WriteLine(string? message) - => this.Output.WriteLine(message ?? 
string.Empty); + /// + public override void WriteLine(string? value) + => this.Output.WriteLine(value ?? string.Empty); + + /// + public override void Write(object? value = null) + => this.Output.WriteLine(value ?? string.Empty); + + /// + public override Encoding Encoding => Encoding.UTF8; /// - /// Current interface ITestOutputHelper does not have a Write method. This extension method adds it to make it analogous to Console.Write when used in Console apps. + /// Outputs the last message in the chat history. /// - /// Target object to write - public void Write(object? target = null) - => this.Output.WriteLine(target ?? string.Empty); + /// Chat history + protected void OutputLastMessage(ChatHistory chatHistory) + { + var message = chatHistory.Last(); + Console.WriteLine($"{message.Role}: {message.Content}"); + Console.WriteLine("------------------------"); + } protected sealed class LoggingHandler(HttpMessageHandler innerHandler, ITestOutputHelper output) : DelegatingHandler(innerHandler) { private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { WriteIndented = true }; diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs index 1a86413a5e05..01b60b08c9cb 100644 --- a/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs +++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/TestConfiguration.cs @@ -19,8 +19,11 @@ public static void Initialize(IConfigurationRoot configRoot) s_instance = new TestConfiguration(configRoot); } + public static OllamaConfig Ollama => LoadSection(); public static OpenAIConfig OpenAI => LoadSection(); + public static OnnxConfig Onnx => LoadSection(); public static AzureOpenAIConfig AzureOpenAI => LoadSection(); + public static AzureAIInferenceConfig AzureAIInference => LoadSection(); public static AzureOpenAIConfig AzureOpenAIImages => LoadSection(); public static 
AzureOpenAIEmbeddingsConfig AzureOpenAIEmbeddings => LoadSection(); public static AzureAISearchConfig AzureAISearch => LoadSection(); @@ -71,6 +74,22 @@ public class OpenAIConfig public string ApiKey { get; set; } } + public class AzureAIInferenceConfig + { + public string ServiceId { get; set; } + public string Endpoint { get; set; } + public string? ApiKey { get; set; } + } + + public class OnnxConfig + { + public string ModelId { get; set; } + public string ModelPath { get; set; } + public string EmbeddingModelId { get; set; } + public string EmbeddingModelPath { get; set; } + public string EmbeddingVocabPath { get; set; } + } + public class AzureOpenAIConfig { public string ServiceId { get; set; } @@ -220,6 +239,14 @@ public class GeminiConfig } } + public class OllamaConfig + { + public string? ModelId { get; set; } + public string? EmbeddingModelId { get; set; } + + public string Endpoint { get; set; } = "http://localhost:11434"; + } + public class AzureCosmosDbMongoDbConfig { public string ConnectionString { get; set; } diff --git a/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props b/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props index 0c47e16d8d93..df5205c40a82 100644 --- a/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props +++ b/dotnet/src/InternalUtilities/samples/SamplesInternalUtilities.props @@ -1,5 +1,8 @@ - + + \ No newline at end of file diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs new file mode 100644 index 000000000000..3eef2591274d --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordMapping.cs @@ -0,0 +1,184 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Contains helper methods to map between storage and data models. +/// +[ExcludeFromCodeCoverage] +internal static class VectorStoreRecordMapping +{ + /// + /// Loop through the list of objects and for each one look up the storage name + /// in the and check if the value exists in the . + /// If so, set the value on the record object. + /// + /// The type of the storage properties. + /// The type of the target object. + /// The target object to set the property values on. + /// objects listing the properties on the data model to get values for. + /// Storage property names keyed by data property names. + /// A dictionary of storage values by storage property name. + /// An optional function to convert the storage property values to data property values. + public static void SetValuesOnProperties( + TRecord record, + IEnumerable dataModelPropertiesInfo, + IReadOnlyDictionary dataModelToStorageNameMapping, + IReadOnlyDictionary storageValues, + Func? storageValueConverter = null) + where TRecord : class + { + var propertiesInfoWithValues = BuildPropertiesInfoWithValues( + dataModelPropertiesInfo, + dataModelToStorageNameMapping, + storageValues, + storageValueConverter); + + SetPropertiesOnRecord(record, propertiesInfoWithValues); + } + + /// + /// Build a list of properties with their values from the given data model properties and storage values. + /// + /// The type of the storage properties. + /// objects listing the properties on the data model to get values for. + /// Storage property names keyed by data property names. + /// A dictionary of storage values by storage property name. + /// An optional function to convert the storage property values to data property values. + /// The list of data property objects and their values. 
+ public static IEnumerable> BuildPropertiesInfoWithValues( + IEnumerable dataModelPropertiesInfo, + IReadOnlyDictionary dataModelToStorageNameMapping, + IReadOnlyDictionary storageValues, + Func? storageValueConverter = null) + { + foreach (var propertyInfo in dataModelPropertiesInfo) + { + if (dataModelToStorageNameMapping.TryGetValue(propertyInfo.Name, out var storageName) && + storageValues.TryGetValue(storageName, out var storageValue)) + { + if (storageValueConverter is not null) + { + var convertedStorageValue = storageValueConverter(storageValue, propertyInfo.PropertyType); + yield return new KeyValuePair(propertyInfo, convertedStorageValue); + } + else + { + yield return new KeyValuePair(propertyInfo, (object?)storageValue); + } + } + } + } + + /// + /// Set the given list of properties with their values on the given object. + /// + /// The type of the target object. + /// The target object to set the property values on. + /// A list of properties and their values to set. + public static void SetPropertiesOnRecord( + TRecord record, + IEnumerable> propertiesInfoWithValues) + where TRecord : class + { + foreach (var propertyInfoWithValue in propertiesInfoWithValues) + { + propertyInfoWithValue.Key.SetValue(record, propertyInfoWithValue.Value); + } + } + + /// + /// Create an enumerable of the required type from the input enumerable. + /// + /// The type of elements in the input enumerable. + /// The input enumerable to convert. + /// The type to convert to. + /// The new enumerable in the required type. + /// Thrown when a target type is requested that is not supported. + public static object? CreateEnumerable(IEnumerable input, Type requiredEnumerable) + { + if (input is null) + { + return null; + } + + // If the required type is an array, we can create an ArrayList of the required type, add all + // items from the input, and then convert the ArrayList to an array of the required type. 
+ if (requiredEnumerable.IsArray) + { + if (requiredEnumerable.HasElementType) + { + var elementType = requiredEnumerable.GetElementType(); + + var arrayList = new ArrayList(); + foreach (var item in input) + { + arrayList.Add(item); + } + return arrayList.ToArray(elementType!); + } + + return input.ToArray(); + } + + // If the required type is one of a few supported generic collection interface types that + // are all implemented by List<>, we can create a LIst<> and add all items from the input. + if (requiredEnumerable.IsGenericType) + { + var genericTypeDefinition = requiredEnumerable.GetGenericTypeDefinition(); + if (genericTypeDefinition == typeof(ICollection<>) || + genericTypeDefinition == typeof(IEnumerable<>) || + genericTypeDefinition == typeof(IList<>) || + genericTypeDefinition == typeof(IReadOnlyCollection<>) || + genericTypeDefinition == typeof(IReadOnlyList<>)) + { + // Create a List<> using the generic type argument of the required enumerable. + var genericMemberType = requiredEnumerable.GetGenericArguments()[0]; + var listType = typeof(List<>).MakeGenericType(genericMemberType); + var enumerableType = typeof(IEnumerable<>).MakeGenericType(genericMemberType); + var constructor = listType.GetConstructor([]); + var list = (IList)constructor!.Invoke(null); + + // Add all items from the input into the new list. + foreach (var item in input) + { + list.Add(item); + } + return list; + } + } + + // If the required type is IEnumerable, we can return the input as is. + if (requiredEnumerable == typeof(IEnumerable)) + { + return input; + } + + // If our required type implements IList and has a public parameterless constructor, we can + // create an instance of it using reflection and add all items from the input. 
+ if (typeof(IList).IsAssignableFrom(requiredEnumerable)) + { + var publicParameterlessConstructor = requiredEnumerable.GetConstructor([]); + if (publicParameterlessConstructor is not null) + { + // Create the required type using the parameterless constructor and cast + // it to an IList so we can add our input items. + var list = (IList)publicParameterlessConstructor.Invoke(null); + foreach (var item in input) + { + list.Add(item); + } + return list; + } + } + + throw new NotSupportedException($"Type {requiredEnumerable.FullName} is not supported."); + } +} diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs index d4f06071f66b..338e53e3af75 100644 --- a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReader.cs @@ -1,8 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; -using System.Collections; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Linq; @@ -16,48 +14,377 @@ namespace Microsoft.SemanticKernel.Data; /// Contains helpers for reading vector store model properties and their attributes. /// [ExcludeFromCodeCoverage] -internal static class VectorStoreRecordPropertyReader +#pragma warning disable CA1812 // Used in some projects but not all, so need to suppress to avoid warnings in those it's not used in. +internal sealed class VectorStoreRecordPropertyReader +#pragma warning restore CA1812 { - /// Cache of property enumerations so that we don't incur reflection costs with each invocation. - private static readonly ConcurrentDictionary dataProperties, List vectorProperties)> s_singleVectorPropertiesCache = new(); + /// The of the data model. 
+ private readonly Type _dataModelType; - /// Cache of property enumerations so that we don't incur reflection costs with each invocation. - private static readonly ConcurrentDictionary dataProperties, List vectorProperties)> s_multipleVectorsPropertiesCache = new(); + /// A definition of the current storage model. + private readonly VectorStoreRecordDefinition _vectorStoreRecordDefinition; + + /// Options for configuring the behavior of this class. + private readonly VectorStoreRecordPropertyReaderOptions _options; + + /// The key properties from the definition. + private readonly List _keyProperties; + + /// The data properties from the definition. + private readonly List _dataProperties; + + /// The vector properties from the definition. + private readonly List _vectorProperties; + + /// The of the parameterless constructor from the data model if one exists. + private readonly Lazy _parameterlessConstructorInfo; + + /// The key objects from the data model. + private List? _keyPropertiesInfo; + + /// The data objects from the data model. + private List? _dataPropertiesInfo; + + /// The vector objects from the data model. + private List? _vectorPropertiesInfo; + + /// A lazy initialized map of data model property names to the names under which they are stored in the data store. + private readonly Lazy> _storagePropertyNamesMap; + + /// A lazy initialized list of storage names of key properties. + private readonly Lazy> _keyPropertyStoragePropertyNames; + + /// A lazy initialized list of storage names of data properties. + private readonly Lazy> _dataPropertyStoragePropertyNames; + + /// A lazy initialized list of storage names of vector properties. + private readonly Lazy> _vectorPropertyStoragePropertyNames; + + /// A lazy initialized map of data model property names to the names they will have if serialized to JSON. + private readonly Lazy> _jsonPropertyNamesMap; + + /// A lazy initialized list of json names of key properties. 
+ private readonly Lazy> _keyPropertyJsonNames; + + /// A lazy initialized list of json names of data properties. + private readonly Lazy> _dataPropertyJsonNames; + + /// A lazy initialized list of json names of vector properties. + private readonly Lazy> _vectorPropertyJsonNames; + + public VectorStoreRecordPropertyReader( + Type dataModelType, + VectorStoreRecordDefinition? vectorStoreRecordDefinition, + VectorStoreRecordPropertyReaderOptions? options) + { + this._dataModelType = dataModelType; + this._options = options ?? new VectorStoreRecordPropertyReaderOptions(); + + // If a definition is provided, use it. Otherwise, create one from the type. + if (vectorStoreRecordDefinition is not null) + { + // Here we received a definition, which gives us all of the information we need. + // Some mappers though need to set properties on the data model using reflection + // so we may still need to find the PropertyInfo objects on the data model later if required. + this._vectorStoreRecordDefinition = vectorStoreRecordDefinition; + } + else + { + // Here we didn't receive a definition, so we need to derive the information from + // the data model. Since we may need the PropertyInfo objects later to read or write + // property values on the data model, we save them for later in case we need them. + var propertiesInfo = FindPropertiesInfo(dataModelType); + this._vectorStoreRecordDefinition = CreateVectorStoreRecordDefinitionFromType(propertiesInfo); + + this._keyPropertiesInfo = propertiesInfo.KeyProperties; + this._dataPropertiesInfo = propertiesInfo.DataProperties; + this._vectorPropertiesInfo = propertiesInfo.VectorProperties; + } + + // Verify the definition to make sure it does not have too many or too few of each property type. 
+ (this._keyProperties, this._dataProperties, this._vectorProperties) = SplitDefinitionAndVerify( + dataModelType.Name, + this._vectorStoreRecordDefinition, + this._options.SupportsMultipleKeys, + this._options.SupportsMultipleVectors, + this._options.RequiresAtLeastOneVector); + + // Setup lazy initializers. + this._storagePropertyNamesMap = new Lazy>(() => + { + return BuildPropertyNameToStorageNameMap((this._keyProperties, this._dataProperties, this._vectorProperties)); + }); + + this._parameterlessConstructorInfo = new Lazy(() => + { + var constructor = dataModelType.GetConstructor(Type.EmptyTypes); + if (constructor == null) + { + throw new ArgumentException($"Type {dataModelType.FullName} must have a parameterless constructor."); + } + + return constructor; + }); + + this._keyPropertyStoragePropertyNames = new Lazy>(() => + { + var storagePropertyNames = this._storagePropertyNamesMap.Value; + return this._keyProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); + }); + + this._dataPropertyStoragePropertyNames = new Lazy>(() => + { + var storagePropertyNames = this._storagePropertyNamesMap.Value; + return this._dataProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); + }); + + this._vectorPropertyStoragePropertyNames = new Lazy>(() => + { + var storagePropertyNames = this._storagePropertyNamesMap.Value; + return this._vectorProperties.Select(x => storagePropertyNames[x.DataModelPropertyName]).ToList(); + }); + + this._jsonPropertyNamesMap = new Lazy>(() => + { + return BuildPropertyNameToJsonPropertyNameMap( + (this._keyProperties, this._dataProperties, this._vectorProperties), + dataModelType, + this._options.JsonSerializerOptions); + }); + + this._keyPropertyJsonNames = new Lazy>(() => + { + var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; + return this._keyProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); + }); + + this._dataPropertyJsonNames = new Lazy>(() => + { 
+ var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; + return this._dataProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); + }); + + this._vectorPropertyJsonNames = new Lazy>(() => + { + var jsonPropertyNamesMap = this._jsonPropertyNamesMap.Value; + return this._vectorProperties.Select(x => jsonPropertyNamesMap[x.DataModelPropertyName]).ToList(); + }); + } + + /// Gets the record definition of the current storage model. + public VectorStoreRecordDefinition RecordDefinition => this._vectorStoreRecordDefinition; + + /// Gets the list of properties from the record definition. + public IReadOnlyList Properties => this._vectorStoreRecordDefinition.Properties; + + /// Gets the first object from the record definition that was provided or that was generated from the data model. + public VectorStoreRecordKeyProperty KeyProperty => this._keyProperties[0]; + + /// Gets all objects from the record definition that was provided or that was generated from the data model. + public IReadOnlyList KeyProperties => this._keyProperties; + + /// Gets all objects from the record definition that was provided or that was generated from the data model. + public IReadOnlyList DataProperties => this._dataProperties; + + /// Gets the first objects from the record definition that was provided or that was generated from the data model. + public VectorStoreRecordVectorProperty? VectorProperty => this._vectorProperties.Count > 0 ? this._vectorProperties[0] : null; + + /// Gets all objects from the record definition that was provided or that was generated from the data model. + public IReadOnlyList VectorProperties => this._vectorProperties; + + /// Gets the parameterless constructor if one exists, throws otherwise. + public ConstructorInfo ParameterLessConstructorInfo => this._parameterlessConstructorInfo.Value; + + /// Gets the first key property info object. 
+ public PropertyInfo KeyPropertyInfo + { + get + { + this.LoadPropertyInfoIfNeeded(); + return this._keyPropertiesInfo![0]; + } + } + + /// Gets the key property info objects. + public IReadOnlyList KeyPropertiesInfo + { + get + { + this.LoadPropertyInfoIfNeeded(); + return this._keyPropertiesInfo!; + } + } + + /// Gets the data property info objects. + public IReadOnlyList DataPropertiesInfo + { + get + { + this.LoadPropertyInfoIfNeeded(); + return this._dataPropertiesInfo!; + } + } + + /// Gets the vector property info objects. + public IReadOnlyList VectorPropertiesInfo + { + get + { + this.LoadPropertyInfoIfNeeded(); + return this._vectorPropertiesInfo!; + } + } + + /// Gets the name of the first vector property in the definition or null if there are no vectors. + public string? FirstVectorPropertyName => this._vectorProperties.FirstOrDefault()?.DataModelPropertyName; + + /// Gets the first vector PropertyInfo object in the data model or null if there are no vectors. + public PropertyInfo? FirstVectorPropertyInfo => this.VectorPropertiesInfo.Count > 0 ? this.VectorPropertiesInfo[0] : null; + + /// Gets the property name of the first key property in the definition. + public string KeyPropertyName => this._keyProperties[0].DataModelPropertyName; + + /// Gets the storage name of the first key property in the definition. + public string KeyPropertyStoragePropertyName => this._keyPropertyStoragePropertyNames.Value[0]; + + /// Gets the storage names of all the properties in the definition. + public IReadOnlyDictionary StoragePropertyNamesMap => this._storagePropertyNamesMap.Value; + + /// Gets the storage names of the key properties in the definition. + public IReadOnlyList KeyPropertyStoragePropertyNames => this._keyPropertyStoragePropertyNames.Value; + + /// Gets the storage names of the data properties in the definition. 
+ public IReadOnlyList DataPropertyStoragePropertyNames => this._dataPropertyStoragePropertyNames.Value; + + /// Gets the storage names of the vector properties in the definition. + public IReadOnlyList VectorPropertyStoragePropertyNames => this._vectorPropertyStoragePropertyNames.Value; + + /// Gets the json name of the first key property in the definition. + public string KeyPropertyJsonName => this.KeyPropertyJsonNames[0]; + + /// Gets the json names of the key properties in the definition. + public IReadOnlyList KeyPropertyJsonNames => this._keyPropertyJsonNames.Value; + + /// Gets the json names of the data properties in the definition. + public IReadOnlyList DataPropertyJsonNames => this._dataPropertyJsonNames.Value; + + /// Gets the json names of the vector properties in the definition. + public IReadOnlyList VectorPropertyJsonNames => this._vectorPropertyJsonNames.Value; + + /// A map of data model property names to the names they will have if serialized to JSON. + public IReadOnlyDictionary JsonPropertyNamesMap => this._jsonPropertyNamesMap.Value; + + /// Verify that the data model has a parameterless constructor. + public void VerifyHasParameterlessConstructor() + { + var constructorInfo = this._parameterlessConstructorInfo.Value; + } + + /// Verify that the types of the key properties fall within the provided set. + /// The list of supported types. + public void VerifyKeyProperties(HashSet supportedTypes) + { + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._keyProperties, supportedTypes, "Key"); + } + + /// Verify that the types of the data properties fall within the provided set. + /// The list of supported types. + /// A value indicating whether enumerable types are supported where the element type is one of the supported types. 
+ public void VerifyDataProperties(HashSet supportedTypes, bool supportEnumerable) + { + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._dataProperties, supportedTypes, "Data", supportEnumerable); + } + + /// Verify that the types of the data properties fall within the provided set. + /// The list of supported types. + /// A value indicating whether enumerable types are supported where the element type is one of the supported types. + public void VerifyDataProperties(HashSet supportedTypes, HashSet supportedEnumerableElementTypes) + { + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._dataProperties, supportedTypes, supportedEnumerableElementTypes, "Data"); + } + + /// Verify that the types of the vector properties fall within the provided set. + /// The list of supported types. + public void VerifyVectorProperties(HashSet supportedTypes) + { + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._vectorProperties, supportedTypes, "Vector"); + } + + /// + /// Get the storage property name for the given data model property name. + /// + /// The data model property name for which to get the storage property name. + /// The storage property name. + public string GetStoragePropertyName(string dataModelPropertyName) + { + return this._storagePropertyNamesMap.Value[dataModelPropertyName]; + } + + /// + /// Get the name under which a property will be stored if serialized to JSON + /// + /// The data model property name for which to get the JSON name. + /// The JSON name. + public string GetJsonPropertyName(string dataModelPropertyName) + { + return this._jsonPropertyNamesMap.Value[dataModelPropertyName]; + } + + /// + /// Check if we have previously loaded the objects from the data model and if not, load them. 
+ /// + private void LoadPropertyInfoIfNeeded() + { + if (this._keyPropertiesInfo != null) + { + return; + } + + // If we previously built the definition from the data model, the PropertyInfo objects + // from the data model would already be saved. If we didn't though, there could be a mismatch + // between what is defined in the definition and what is in the data model. Therefore, this + // method will throw if any property in the definition is not on the data model. + var propertiesInfo = FindPropertiesInfo(this._dataModelType, this._vectorStoreRecordDefinition); + + this._keyPropertiesInfo = propertiesInfo.KeyProperties; + this._dataPropertiesInfo = propertiesInfo.DataProperties; + this._vectorPropertiesInfo = propertiesInfo.VectorProperties; + } /// /// Split the given into key, data and vector properties and verify that we have the expected numbers of each type. /// /// The name of the type that the definition relates to. /// The to split. + /// A value indicating whether multiple key properties are supported. /// A value indicating whether multiple vectors are supported. /// A value indicating whether we need at least one vector. /// The properties on the split into key, data and vector groupings. /// Thrown if there are any validation failures with the provided . 
- public static (VectorStoreRecordKeyProperty KeyProperty, List DataProperties, List VectorProperties) SplitDefinitionAndVerify( + private static (List KeyProperties, List DataProperties, List VectorProperties) SplitDefinitionAndVerify( string typeName, VectorStoreRecordDefinition definition, + bool supportsMultipleKeys, bool supportsMultipleVectors, bool requiresAtLeastOneVector) { var keyProperties = definition.Properties.OfType().ToList(); + var dataProperties = definition.Properties.OfType().ToList(); + var vectorProperties = definition.Properties.OfType().ToList(); - if (keyProperties.Count > 1) + if (keyProperties.Count > 1 && !supportsMultipleKeys) { throw new ArgumentException($"Multiple key properties found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)}."); } - var keyProperty = keyProperties.FirstOrDefault(); - var dataProperties = definition.Properties.OfType().ToList(); - var vectorProperties = definition.Properties.OfType().ToList(); - - if (keyProperty is null) + if (keyProperties.Count == 0) { throw new ArgumentException($"No key property found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)}."); } if (requiresAtLeastOneVector && vectorProperties.Count == 0) { - throw new ArgumentException($"No vector property found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)}."); + throw new ArgumentException($"No vector property found on type {typeName} or the provided {nameof(VectorStoreRecordDefinition)} while at least one is required."); } if (!supportsMultipleVectors && vectorProperties.Count > 1) @@ -65,7 +392,7 @@ public static (VectorStoreRecordKeyProperty KeyProperty, List @@ -74,34 +401,19 @@ public static (VectorStoreRecordKeyProperty KeyProperty, List /// The data model to find the properties on. - /// A value indicating whether multiple vector properties are supported instead of just one. /// The categorized properties. 
- public static (PropertyInfo KeyProperty, List DataProperties, List VectorProperties) FindProperties(Type type, bool supportsMultipleVectors) + private static (List KeyProperties, List DataProperties, List VectorProperties) FindPropertiesInfo(Type type) { - var cache = supportsMultipleVectors ? s_multipleVectorsPropertiesCache : s_singleVectorPropertiesCache; - - // First check the cache. - if (cache.TryGetValue(type, out var cachedProperties)) - { - return cachedProperties; - } - - PropertyInfo? keyProperty = null; + List keyProperties = new(); List dataProperties = new(); List vectorProperties = new(); - bool singleVectorPropertyFound = false; foreach (var property in type.GetProperties()) { // Get Key property. if (property.GetCustomAttribute() is not null) { - if (keyProperty is not null) - { - throw new ArgumentException($"Multiple key properties found on type {type.FullName}."); - } - - keyProperty = property; + keyProperties.Add(property); } // Get data properties. @@ -113,73 +425,39 @@ public static (PropertyInfo KeyProperty, List DataProperties, List // Get Vector properties. if (property.GetCustomAttribute() is not null) { - // Add all vector properties if we support multiple vectors. - if (supportsMultipleVectors) - { - vectorProperties.Add(property); - } - // Add only one vector property if we don't support multiple vectors. - else if (!singleVectorPropertyFound) - { - vectorProperties.Add(property); - singleVectorPropertyFound = true; - } - else - { - throw new ArgumentException($"Multiple vector properties found on type {type.FullName} while only one is supported."); - } + vectorProperties.Add(property); } } - // Check that we have a key property. - if (keyProperty is null) - { - throw new ArgumentException($"No key property found on type {type.FullName}."); - } - - // Check that we have one vector property if we don't have named vectors. 
- if (!supportsMultipleVectors && !singleVectorPropertyFound) - { - throw new ArgumentException($"No vector property found on type {type.FullName}."); - } - - // Update the cache. - cache[type] = (keyProperty, dataProperties, vectorProperties); - - return (keyProperty, dataProperties, vectorProperties); + return (keyProperties, dataProperties, vectorProperties); } /// /// Find the properties listed in the on the and verify - /// that they exist and that we have the expected numbers of each type. + /// that they exist. /// Return those properties in separate categories. /// /// The data model to find the properties on. /// The property configuration. - /// A value indicating whether multiple vector properties are supported instead of just one. /// The categorized properties. - public static (PropertyInfo KeyProperty, List DataProperties, List VectorProperties) FindProperties(Type type, VectorStoreRecordDefinition vectorStoreRecordDefinition, bool supportsMultipleVectors) + public static (List KeyProperties, List DataProperties, List VectorProperties) FindPropertiesInfo(Type type, VectorStoreRecordDefinition vectorStoreRecordDefinition) { - PropertyInfo? keyProperty = null; + List keyProperties = new(); List dataProperties = new(); List vectorProperties = new(); - bool singleVectorPropertyFound = false; foreach (VectorStoreRecordProperty property in vectorStoreRecordDefinition.Properties) { // Key. if (property is VectorStoreRecordKeyProperty keyPropertyInfo) { - if (keyProperty is not null) - { - throw new ArgumentException($"Multiple key properties configured for type {type.FullName}."); - } - - keyProperty = type.GetProperty(keyPropertyInfo.DataModelPropertyName); + var keyProperty = type.GetProperty(keyPropertyInfo.DataModelPropertyName); if (keyProperty == null) { throw new ArgumentException($"Key property '{keyPropertyInfo.DataModelPropertyName}' not found on type {type.FullName}."); } + + keyProperties.Add(keyProperty); } // Data. 
else if (property is VectorStoreRecordDataProperty dataPropertyInfo) @@ -201,21 +479,7 @@ public static (PropertyInfo KeyProperty, List DataProperties, List throw new ArgumentException($"Vector property '{vectorPropertyInfo.DataModelPropertyName}' not found on type {type.FullName}."); } - // Add all vector properties if we support multiple vectors. - if (supportsMultipleVectors) - { - vectorProperties.Add(vectorProperty); - } - // Add only one vector property if we don't support multiple vectors. - else if (!singleVectorPropertyFound) - { - vectorProperties.Add(vectorProperty); - singleVectorPropertyFound = true; - } - else - { - throw new ArgumentException($"Multiple vector properties configured for type {type.FullName} while only one is supported."); - } + vectorProperties.Add(vectorProperty); } else { @@ -223,38 +487,33 @@ public static (PropertyInfo KeyProperty, List DataProperties, List } } - // Check that we have a key property. - if (keyProperty is null) - { - throw new ArgumentException($"No key property configured for type {type.FullName}."); - } - - // Check that we have one vector property if we don't have named vectors. - if (!supportsMultipleVectors && !singleVectorPropertyFound) - { - throw new ArgumentException($"No vector property configured for type {type.FullName}."); - } - - return (keyProperty!, dataProperties, vectorProperties); + return (keyProperties, dataProperties, vectorProperties); } /// - /// Create a by reading the attributes on the properties of the given type. + /// Create a by reading the attributes on the provided objects. /// - /// The type to create the definition for. - /// if the store supports multiple vectors, otherwise. - /// The based on the given type. - public static VectorStoreRecordDefinition CreateVectorStoreRecordDefinitionFromType(Type type, bool supportsMultipleVectors) + /// objects to build a from. + /// The based on the given objects. 
+ private static VectorStoreRecordDefinition CreateVectorStoreRecordDefinitionFromType((List KeyProperties, List DataProperties, List VectorProperties) propertiesInfo) { - var properties = FindProperties(type, supportsMultipleVectors); var definitionProperties = new List(); - // Key property. - var keyAttribute = properties.KeyProperty.GetCustomAttribute(); - definitionProperties.Add(new VectorStoreRecordKeyProperty(properties.KeyProperty.Name, properties.KeyProperty.PropertyType) { StoragePropertyName = keyAttribute!.StoragePropertyName }); + // Key properties. + foreach (var keyProperty in propertiesInfo.KeyProperties) + { + var keyAttribute = keyProperty.GetCustomAttribute(); + if (keyAttribute is not null) + { + definitionProperties.Add(new VectorStoreRecordKeyProperty(keyProperty.Name, keyProperty.PropertyType) + { + StoragePropertyName = keyAttribute.StoragePropertyName + }); + } + } // Data properties. - foreach (var dataProperty in properties.DataProperties) + foreach (var dataProperty in propertiesInfo.DataProperties) { var dataAttribute = dataProperty.GetCustomAttribute(); if (dataAttribute is not null) @@ -269,7 +528,7 @@ public static VectorStoreRecordDefinition CreateVectorStoreRecordDefinitionFromT } // Vector properties. - foreach (var vectorProperty in properties.VectorProperties) + foreach (var vectorProperty in propertiesInfo.VectorProperties) { var vectorAttribute = vectorProperty.GetCustomAttribute(); if (vectorAttribute is not null) @@ -288,183 +547,50 @@ public static VectorStoreRecordDefinition CreateVectorStoreRecordDefinitionFromT } /// - /// Verify that the given properties are of the supported types. - /// - /// The properties to check. - /// A set of supported types that the provided properties may have. - /// A description of the category of properties being checked. Used for error messaging. - /// A value indicating whether versions of all the types should also be supported. 
- /// Thrown if any of the properties are not in the given set of types. - public static void VerifyPropertyTypes(List properties, HashSet supportedTypes, string propertyCategoryDescription, bool? supportEnumerable = false) - { - var supportedEnumerableTypes = supportEnumerable == true - ? supportedTypes - : []; - - VerifyPropertyTypes(properties, supportedTypes, supportedEnumerableTypes, propertyCategoryDescription); - } - - /// - /// Verify that the given properties are of the supported types. - /// - /// The properties to check. - /// A set of supported types that the provided properties may have. - /// A set of supported types that the provided enumerable properties may use as their element type. - /// A description of the category of properties being checked. Used for error messaging. - /// Thrown if any of the properties are not in the given set of types. - public static void VerifyPropertyTypes(List properties, HashSet supportedTypes, HashSet supportedEnumerableTypes, string propertyCategoryDescription) - { - foreach (var property in properties) - { - VerifyPropertyType(property.Name, property.PropertyType, supportedTypes, supportedEnumerableTypes, propertyCategoryDescription); - } - } - - /// - /// Verify that the given properties are of the supported types. - /// - /// The properties to check. - /// A set of supported types that the provided properties may have. - /// A description of the category of properties being checked. Used for error messaging. - /// A value indicating whether versions of all the types should also be supported. - /// Thrown if any of the properties are not in the given set of types. - public static void VerifyPropertyTypes(IEnumerable properties, HashSet supportedTypes, string propertyCategoryDescription, bool? supportEnumerable = false) - { - var supportedEnumerableTypes = supportEnumerable == true - ? 
supportedTypes - : []; - - VerifyPropertyTypes(properties, supportedTypes, supportedEnumerableTypes, propertyCategoryDescription); - } - - /// - /// Verify that the given properties are of the supported types. - /// - /// The properties to check. - /// A set of supported types that the provided properties may have. - /// A set of supported types that the provided enumerable properties may use as their element type. - /// A description of the category of properties being checked. Used for error messaging. - /// Thrown if any of the properties are not in the given set of types. - public static void VerifyPropertyTypes(IEnumerable properties, HashSet supportedTypes, HashSet supportedEnumerableTypes, string propertyCategoryDescription) - { - foreach (var property in properties) - { - VerifyPropertyType(property.DataModelPropertyName, property.PropertyType, supportedTypes, supportedEnumerableTypes, propertyCategoryDescription); - } - } - - /// - /// Verify that the given property is of the supported types. - /// - /// The name of the property being checked. Used for error messaging. - /// The type of the property being checked. - /// A set of supported types that the provided property may have. - /// A set of supported types that the provided property may use as its element type if it's enumerable. - /// A description of the category of property being checked. Used for error messaging. - /// Thrown if the property is not in the given set of types. - public static void VerifyPropertyType(string propertyName, Type propertyType, HashSet supportedTypes, HashSet supportedEnumerableTypes, string propertyCategoryDescription) - { - // Add shortcut before testing all the more expensive scenarios. - if (supportedTypes.Contains(propertyType)) - { - return; - } - - // Check all collection scenarios and get stored type. 
- if (supportedEnumerableTypes.Count > 0 && typeof(IEnumerable).IsAssignableFrom(propertyType)) - { - var typeToCheck = propertyType switch - { - IEnumerable => typeof(object), - var enumerableType when enumerableType.IsGenericType && enumerableType.GetGenericTypeDefinition() == typeof(IEnumerable<>) => enumerableType.GetGenericArguments()[0], - var arrayType when arrayType.IsArray => arrayType.GetElementType()!, - var interfaceType when interfaceType.GetInterfaces().FirstOrDefault(i => i.IsGenericType && i.GetGenericTypeDefinition() == typeof(IEnumerable<>)) is Type enumerableInterface => - enumerableInterface.GetGenericArguments()[0], - _ => propertyType - }; - - if (!supportedEnumerableTypes.Contains(typeToCheck)) - { - var supportedEnumerableElementTypesString = string.Join(", ", supportedEnumerableTypes!.Select(t => t.FullName)); - throw new ArgumentException($"Enumerable {propertyCategoryDescription} properties must have one of the supported element types: {supportedEnumerableElementTypesString}. Element type of the property '{propertyName}' is {typeToCheck.FullName}."); - } - } - else - { - // if we got here, we know the type is not supported - var supportedTypesString = string.Join(", ", supportedTypes.Select(t => t.FullName)); - throw new ArgumentException($"{propertyCategoryDescription} properties must be one of the supported types: {supportedTypesString}. Type of the property '{propertyName}' is {propertyType.FullName}."); - } - } - - /// - /// Get the JSON property name of a property by using the if available, otherwise - /// using the if available, otherwise falling back to the property name. - /// The provided may not actually contain the property, e.g. when the user has a data model that - /// doesn't resemble the stored data and where they are using a custom mapper. + /// Build a map of property names to the names under which they should be saved in storage, for the given properties. /// - /// The property to retrieve a storage name for. 
- /// The data model type that the property belongs to. - /// The options used for JSON serialization. - /// The JSON storage property name. - public static string GetJsonPropertyName(VectorStoreRecordProperty property, Type dataModel, JsonSerializerOptions options) + /// The properties to build the map for. + /// The map from property names to the names under which they should be saved in storage. + private static Dictionary BuildPropertyNameToStorageNameMap((List keyProperties, List dataProperties, List vectorProperties) properties) { - var propertyInfo = dataModel.GetProperty(property.DataModelPropertyName); - - if (propertyInfo != null) - { - var jsonPropertyNameAttribute = propertyInfo.GetCustomAttribute(); - if (jsonPropertyNameAttribute is not null) - { - return jsonPropertyNameAttribute.Name; - } - } + var storagePropertyNameMap = new Dictionary(); - if (options.PropertyNamingPolicy is not null) + foreach (var keyProperty in properties.keyProperties) { - return options.PropertyNamingPolicy.ConvertName(property.DataModelPropertyName); + storagePropertyNameMap.Add(keyProperty.DataModelPropertyName, keyProperty.StoragePropertyName ?? keyProperty.DataModelPropertyName); } - return property.DataModelPropertyName; - } - - /// - /// Get the JSON property name of a property by using the if available, otherwise - /// using the if available, otherwise falling back to the property name. - /// - /// The options used for JSON serialization. - /// The property to retrieve a storage name for. - /// The JSON storage property name. - public static string GetJsonPropertyName(JsonSerializerOptions options, PropertyInfo property) - { - var jsonPropertyNameAttribute = property.GetCustomAttribute(); - if (jsonPropertyNameAttribute is not null) + foreach (var dataProperty in properties.dataProperties) { - return jsonPropertyNameAttribute.Name; + storagePropertyNameMap.Add(dataProperty.DataModelPropertyName, dataProperty.StoragePropertyName ?? 
dataProperty.DataModelPropertyName); } - if (options.PropertyNamingPolicy is not null) + foreach (var vectorProperty in properties.vectorProperties) { - return options.PropertyNamingPolicy.ConvertName(property.Name); + storagePropertyNameMap.Add(vectorProperty.DataModelPropertyName, vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName); } - return property.Name; + return storagePropertyNameMap; } /// - /// Build a map of property names to the names under which they should be saved in storage if using JSON serialization. + /// Build a map of property names to the names that they would have if serialized to JSON. /// /// The properties to build the map for. /// The data model type that the property belongs to. /// The options used for JSON serialization. - /// The map from property names to the names under which they should be saved in storage if using JSON serialization. - public static Dictionary BuildPropertyNameToJsonPropertyNameMap( - (VectorStoreRecordKeyProperty keyProperty, List dataProperties, List vectorProperties) properties, + /// The map from property names to the names that they would have if serialized to JSON. + private static Dictionary BuildPropertyNameToJsonPropertyNameMap( + (List keyProperties, List dataProperties, List vectorProperties) properties, Type dataModel, JsonSerializerOptions options) { var jsonPropertyNameMap = new Dictionary(); - jsonPropertyNameMap.Add(properties.keyProperty.DataModelPropertyName, GetJsonPropertyName(properties.keyProperty, dataModel, options)); + + foreach (var keyProperty in properties.keyProperties) + { + jsonPropertyNameMap.Add(keyProperty.DataModelPropertyName, GetJsonPropertyName(keyProperty, dataModel, options)); + } foreach (var dataProperty in properties.dataProperties) { @@ -480,53 +606,33 @@ public static Dictionary BuildPropertyNameToJsonPropertyNameMap( } /// - /// Build a map of property names to the names under which they should be saved in storage if using JSON serialization. 
+ /// Get the JSON property name of a property by using the if available, otherwise + /// using the if available, otherwise falling back to the property name. + /// The provided may not actually contain the property, e.g. when the user has a data model that + /// doesn't resemble the stored data and where they are using a custom mapper. /// - /// The properties to build the map for. + /// The property to retrieve a JSON name for. /// The data model type that the property belongs to. /// The options used for JSON serialization. - /// The map from property names to the names under which they should be saved in storage if using JSON serialization. - public static Dictionary BuildPropertyNameToJsonPropertyNameMap( - (PropertyInfo keyProperty, List dataProperties, List vectorProperties) properties, - Type dataModel, - JsonSerializerOptions options) - { - var jsonPropertyNameMap = new Dictionary(); - jsonPropertyNameMap.Add(properties.keyProperty.Name, GetJsonPropertyName(options, properties.keyProperty)); - - foreach (var dataProperty in properties.dataProperties) - { - jsonPropertyNameMap.Add(dataProperty.Name, GetJsonPropertyName(options, dataProperty)); - } - - foreach (var vectorProperty in properties.vectorProperties) - { - jsonPropertyNameMap.Add(vectorProperty.Name, GetJsonPropertyName(options, vectorProperty)); - } - - return jsonPropertyNameMap; - } - - /// - /// Build a map of property names to the names under which they should be saved in storage, for the given properties. - /// - /// The properties to build the map for. - /// The map from property names to the names under which they should be saved in storage. - public static Dictionary BuildPropertyNameToStorageNameMap((VectorStoreRecordKeyProperty keyProperty, List dataProperties, List vectorProperties) properties) + /// The JSON property name. 
+ private static string GetJsonPropertyName(VectorStoreRecordProperty property, Type dataModel, JsonSerializerOptions options) { - var storagePropertyNameMap = new Dictionary(); - storagePropertyNameMap.Add(properties.keyProperty.DataModelPropertyName, properties.keyProperty.StoragePropertyName ?? properties.keyProperty.DataModelPropertyName); + var propertyInfo = dataModel.GetProperty(property.DataModelPropertyName); - foreach (var dataProperty in properties.dataProperties) + if (propertyInfo != null) { - storagePropertyNameMap.Add(dataProperty.DataModelPropertyName, dataProperty.StoragePropertyName ?? dataProperty.DataModelPropertyName); + var jsonPropertyNameAttribute = propertyInfo.GetCustomAttribute(); + if (jsonPropertyNameAttribute is not null) + { + return jsonPropertyNameAttribute.Name; + } } - foreach (var vectorProperty in properties.vectorProperties) + if (options.PropertyNamingPolicy is not null) { - storagePropertyNameMap.Add(vectorProperty.DataModelPropertyName, vectorProperty.StoragePropertyName ?? vectorProperty.DataModelPropertyName); + return options.PropertyNamingPolicy.ConvertName(property.DataModelPropertyName); } - return storagePropertyNameMap; + return property.DataModelPropertyName; } } diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs new file mode 100644 index 000000000000..67ef4a6fcaac --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyReaderOptions.cs @@ -0,0 +1,33 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Contains options for . +/// +[ExcludeFromCodeCoverage] +internal sealed class VectorStoreRecordPropertyReaderOptions +{ + /// + /// Gets or sets a value indicating whether the connector/db supports multiple key properties. 
+ /// + public bool SupportsMultipleKeys { get; set; } = false; + + /// + /// Gets or sets a value indicating whether the connector/db supports multiple vector properties. + /// + public bool SupportsMultipleVectors { get; set; } = true; + + /// + /// Gets or sets a value indicating whether the connector/db requires at least one vector property. + /// + public bool RequiresAtLeastOneVector { get; set; } = false; + + /// + /// Gets or sets the json serializer options that the connector might be using for storage serialization. + /// + public JsonSerializerOptions JsonSerializerOptions = JsonSerializerOptions.Default; +} diff --git a/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs new file mode 100644 index 000000000000..48344cb9e437 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Data/VectorStoreRecordPropertyVerification.cs @@ -0,0 +1,224 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// Contains helpers for verifying the types of vector store record properties. +/// +[ExcludeFromCodeCoverage] +internal static class VectorStoreRecordPropertyVerification +{ + /// + /// Verify that the given properties are of the supported types. + /// + /// The properties to check. + /// A set of supported types that the provided properties may have. + /// A description of the category of properties being checked. Used for error messaging. + /// A value indicating whether versions of all the types should also be supported. + /// Thrown if any of the properties are not in the given set of types. + public static void VerifyPropertyTypes(List properties, HashSet supportedTypes, string propertyCategoryDescription, bool? 
supportEnumerable = false) + { + var supportedEnumerableElementTypes = supportEnumerable == true + ? supportedTypes + : []; + + VerifyPropertyTypes(properties, supportedTypes, supportedEnumerableElementTypes, propertyCategoryDescription); + } + + /// + /// Verify that the given properties are of the supported types. + /// + /// The properties to check. + /// A set of supported types that the provided properties may have. + /// A set of supported types that the provided enumerable properties may use as their element type. + /// A description of the category of properties being checked. Used for error messaging. + /// Thrown if any of the properties are not in the given set of types. + public static void VerifyPropertyTypes(List properties, HashSet supportedTypes, HashSet supportedEnumerableElementTypes, string propertyCategoryDescription) + { + foreach (var property in properties) + { + VerifyPropertyType(property.Name, property.PropertyType, supportedTypes, supportedEnumerableElementTypes, propertyCategoryDescription); + } + } + + /// + /// Verify that the given properties are of the supported types. + /// + /// The properties to check. + /// A set of supported types that the provided properties may have. + /// A description of the category of properties being checked. Used for error messaging. + /// A value indicating whether versions of all the types should also be supported. + /// Thrown if any of the properties are not in the given set of types. + public static void VerifyPropertyTypes(IEnumerable properties, HashSet supportedTypes, string propertyCategoryDescription, bool? supportEnumerable = false) + { + var supportedEnumerableElementTypes = supportEnumerable == true + ? supportedTypes + : []; + + VerifyPropertyTypes(properties, supportedTypes, supportedEnumerableElementTypes, propertyCategoryDescription); + } + + /// + /// Verify that the given properties are of the supported types. + /// + /// The properties to check. 
+ /// A set of supported types that the provided properties may have. + /// A set of supported types that the provided enumerable properties may use as their element type. + /// A description of the category of properties being checked. Used for error messaging. + /// Thrown if any of the properties are not in the given set of types. + public static void VerifyPropertyTypes(IEnumerable properties, HashSet supportedTypes, HashSet supportedEnumerableElementTypes, string propertyCategoryDescription) + { + foreach (var property in properties) + { + VerifyPropertyType(property.DataModelPropertyName, property.PropertyType, supportedTypes, supportedEnumerableElementTypes, propertyCategoryDescription); + } + } + + /// + /// Verify that the given property is of the supported types. + /// + /// The name of the property being checked. Used for error messaging. + /// The type of the property being checked. + /// A set of supported types that the provided property may have. + /// A set of supported types that the provided property may use as its element type if it's enumerable. + /// A description of the category of property being checked. Used for error messaging. + /// Thrown if the property is not in the given set of types. + public static void VerifyPropertyType(string propertyName, Type propertyType, HashSet supportedTypes, HashSet supportedEnumerableElementTypes, string propertyCategoryDescription) + { + // Add shortcut before testing all the more expensive scenarios. + if (supportedTypes.Contains(propertyType)) + { + return; + } + + // Check all collection scenarios and get stored type. 
+ if (supportedEnumerableElementTypes.Count > 0 && IsSupportedEnumerableType(propertyType)) + { + var typeToCheck = GetCollectionElementType(propertyType); + + if (!supportedEnumerableElementTypes.Contains(typeToCheck)) + { + var supportedEnumerableElementTypesString = string.Join(", ", supportedEnumerableElementTypes!.Select(t => t.FullName)); + throw new ArgumentException($"Enumerable {propertyCategoryDescription} properties must have one of the supported element types: {supportedEnumerableElementTypesString}. Element type of the property '{propertyName}' is {typeToCheck.FullName}."); + } + } + else + { + // if we got here, we know the type is not supported + var supportedTypesString = string.Join(", ", supportedTypes.Select(t => t.FullName)); + throw new ArgumentException($"{propertyCategoryDescription} properties must be one of the supported types: {supportedTypesString}. Type of the property '{propertyName}' is {propertyType.FullName}."); + } + } + + /// + /// Verify if the provided type is one of the supported Enumerable types. + /// + /// The type to check. + /// if the type is a supported Enumerable, otherwise. + public static bool IsSupportedEnumerableType(Type type) + { + if (type.IsArray || type == typeof(IEnumerable)) + { + return true; + } + + if (typeof(IList).IsAssignableFrom(type) && type.GetConstructor([]) != null) + { + return true; + } + + if (type.IsGenericType) + { + var genericTypeDefinition = type.GetGenericTypeDefinition(); + if (genericTypeDefinition == typeof(ICollection<>) || + genericTypeDefinition == typeof(IEnumerable<>) || + genericTypeDefinition == typeof(IList<>) || + genericTypeDefinition == typeof(IReadOnlyCollection<>) || + genericTypeDefinition == typeof(IReadOnlyList<>)) + { + return true; + } + } + + return false; + } + + /// + /// Returns of collection elements. 
+ /// + public static Type GetCollectionElementType(Type collectionType) + { + return collectionType switch + { + IEnumerable => typeof(object), + var enumerableType when enumerableType.IsGenericType && enumerableType.GetGenericTypeDefinition() == typeof(IEnumerable<>) => enumerableType.GetGenericArguments()[0], + var arrayType when arrayType.IsArray => arrayType.GetElementType()!, + var interfaceType when interfaceType.GetInterfaces().FirstOrDefault(i => i.IsGenericType && i.GetGenericTypeDefinition() == typeof(IEnumerable<>)) is Type enumerableInterface => + enumerableInterface.GetGenericArguments()[0], + _ => collectionType + }; + } + + /// + /// Checks that if the provided is a that the key type is supported by the default mappers. + /// If not supported, a custom mapper must be supplied, otherwise an exception is thrown. + /// + /// The type of the record data model used by the connector. + /// A value indicating whether a custom mapper was supplied to the connector + /// The list of key types supported by the default mappers. + /// Thrown if the key type of the is not supported by the default mappers and a custom mapper was not supplied. + public static void VerifyGenericDataModelKeyType(Type recordType, bool customMapperSupplied, IEnumerable allowedKeyTypes) + { + // If we are not dealing with a generic data model, no need to check anything else. + if (!recordType.IsGenericType || recordType.GetGenericTypeDefinition() != typeof(VectorStoreGenericDataModel<>)) + { + return; + } + + // If the key type is supported, we are good. + var keyType = recordType.GetGenericArguments()[0]; + if (allowedKeyTypes.Contains(keyType)) + { + return; + } + + // If the key type is not supported out of the box, but a custom mapper was supplied, we are good. + if (customMapperSupplied) + { + return; + } + + throw new ArgumentException($"The key type '{keyType.FullName}' of data model '{nameof(VectorStoreGenericDataModel)}' is not supported by the default mappers. 
" + + $"Only the following key types are supported: {string.Join(", ", allowedKeyTypes)}. Please provide your own mapper to map to your chosen key type."); + } + + /// + /// Checks that if the provided is a that a is also provided. + /// + /// The type of the record data model used by the connector. + /// A value indicating whether a record definition was supplied to the connector. + /// Thrown if a is not provided when using . + public static void VerifyGenericDataModelDefinitionSupplied(Type recordType, bool recordDefinitionSupplied) + { + // If we are not dealing with a generic data model, no need to check anything else. + if (!recordType.IsGenericType || recordType.GetGenericTypeDefinition() != typeof(VectorStoreGenericDataModel<>)) + { + return; + } + + // If we are dealing with a generic data model, and a record definition was supplied, we are good. + if (recordDefinitionSupplied) + { + return; + } + + throw new ArgumentException($"A {nameof(VectorStoreRecordDefinition)} must be provided when using '{nameof(VectorStoreGenericDataModel)}'."); + } +} diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs index 3425d187e4fd..e091939f0cf3 100644 --- a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs +++ b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs @@ -39,7 +39,7 @@ internal static class ModelDiagnostics /// Start a text completion activity for a given model. /// The activity will be tagged with the a set of attributes specified by the semantic conventions. /// - public static Activity? StartCompletionActivity( + internal static Activity? StartCompletionActivity( Uri? endpoint, string modelName, string modelProvider, @@ -52,7 +52,7 @@ internal static class ModelDiagnostics /// Start a chat completion activity for a given model. /// The activity will be tagged with the a set of attributes specified by the semantic conventions. 
/// - public static Activity? StartCompletionActivity( + internal static Activity? StartCompletionActivity( Uri? endpoint, string modelName, string modelProvider, @@ -65,20 +65,20 @@ internal static class ModelDiagnostics /// Set the text completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. /// - public static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) + internal static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) => SetCompletionResponse(activity, completions, promptTokens, completionTokens, completions => $"[{string.Join(", ", completions)}]"); /// /// Set the chat completion response for a given activity. /// The activity will be enriched with the response attributes specified by the semantic conventions. /// - public static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) + internal static void SetCompletionResponse(this Activity activity, IEnumerable completions, int? promptTokens = null, int? completionTokens = null) => SetCompletionResponse(activity, completions, promptTokens, completionTokens, ToOpenAIFormat); /// /// Notify the end of streaming for a given activity. /// - public static void EndStreaming( + internal static void EndStreaming( this Activity activity, IEnumerable? contents, IEnumerable? 
toolCalls = null, @@ -98,7 +98,7 @@ public static void EndStreaming( /// The activity to set the response id /// The response id /// The activity with the response id set for chaining - public static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId); + internal static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId); /// /// Set the prompt token usage for a given activity. @@ -106,7 +106,7 @@ public static void EndStreaming( /// The activity to set the prompt token usage /// The number of prompt tokens used /// The activity with the prompt token usage set for chaining - public static Activity SetPromptTokenUsage(this Activity activity, int promptTokens) => activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens); + internal static Activity SetPromptTokenUsage(this Activity activity, int promptTokens) => activity.SetTag(ModelDiagnosticsTags.PromptToken, promptTokens); /// /// Set the completion token usage for a given activity. @@ -114,13 +114,13 @@ public static void EndStreaming( /// The activity to set the completion token usage /// The number of completion tokens used /// The activity with the completion token usage set for chaining - public static Activity SetCompletionTokenUsage(this Activity activity, int completionTokens) => activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens); + internal static Activity SetCompletionTokenUsage(this Activity activity, int completionTokens) => activity.SetTag(ModelDiagnosticsTags.CompletionToken, completionTokens); /// /// Check if model diagnostics is enabled /// Model diagnostics is enabled if either EnableModelDiagnostics or EnableSensitiveEvents is set to true and there are listeners. 
/// - public static bool IsModelDiagnosticsEnabled() + internal static bool IsModelDiagnosticsEnabled() { return (s_enableDiagnostics || s_enableSensitiveEvents) && s_activitySource.HasListeners(); } @@ -129,7 +129,9 @@ public static bool IsModelDiagnosticsEnabled() /// Check if sensitive events are enabled. /// Sensitive events are enabled if EnableSensitiveEvents is set to true and there are listeners. /// - public static bool IsSensitiveEventsEnabled() => s_enableSensitiveEvents && s_activitySource.HasListeners(); + internal static bool IsSensitiveEventsEnabled() => s_enableSensitiveEvents && s_activitySource.HasListeners(); + + internal static bool HasListeners() => s_activitySource.HasListeners(); #region Private private static void AddOptionalTags(Activity? activity, TPromptExecutionSettings? executionSettings) diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaGenerationContext.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaGenerationContext.cs new file mode 100644 index 000000000000..05955507277a --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaGenerationContext.cs @@ -0,0 +1,102 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text.Json.Serialization.Metadata; + +namespace JsonSchemaMapper; + +/// +/// Defines the context in which a JSON schema within a type graph is being generated. +/// +#if EXPOSE_JSON_SCHEMA_MAPPER +public +#else +internal +#endif + readonly struct JsonSchemaGenerationContext +{ + internal JsonSchemaGenerationContext( + JsonTypeInfo typeInfo, + Type? declaringType, + JsonPropertyInfo? propertyInfo, + ParameterInfo? parameterInfo, + ICustomAttributeProvider? 
propertyAttributeProvider) + { + TypeInfo = typeInfo; + DeclaringType = declaringType; + PropertyInfo = propertyInfo; + ParameterInfo = parameterInfo; + PropertyAttributeProvider = propertyAttributeProvider; + } + + /// + /// The for the type being processed. + /// + public JsonTypeInfo TypeInfo { get; } + + /// + /// The declaring type of the property or parameter being processed. + /// + public Type? DeclaringType { get; } + + /// + /// The if the schema is being generated for a property. + /// + public JsonPropertyInfo? PropertyInfo { get; } + + /// + /// The if a constructor parameter + /// has been associated with the accompanying . + /// + public ParameterInfo? ParameterInfo { get; } + + /// + /// The corresponding to the property or field being processed. + /// + public ICustomAttributeProvider? PropertyAttributeProvider { get; } + + /// + /// Checks if the type, property, or parameter has the specified attribute applied. + /// + /// The type of the attribute to resolve. + /// Whether to look up the hierarchy chain for the inherited custom attribute. + /// True if the attribute is defined by the current context. + public bool IsDefined(bool inherit = false) + where TAttribute : Attribute => + GetCustomAttributes(typeof(TAttribute), inherit).Any(); + + /// + /// Checks if the type, property, or parameter has the specified attribute applied. + /// + /// The type of the attribute to resolve. + /// Whether to look up the hierarchy chain for the inherited custom attribute. + /// The first attribute resolved from the current context, or null. + public TAttribute? GetAttribute(bool inherit = false) + where TAttribute : Attribute => + (TAttribute?)GetCustomAttributes(typeof(TAttribute), inherit).FirstOrDefault(); + + /// + /// Resolves any custom attributes that might have been applied to the type, property, or parameter. + /// + /// The attribute type to resolve. + /// Whether to look up the hierarchy chain for the inherited custom attribute. 
+ /// An enumerable of all custom attributes defined by the context. + public IEnumerable GetCustomAttributes(Type type, bool inherit = false) + { + // Resolves attributes starting from the property, then the parameter, and finally the type itself. + return GetAttrs(PropertyAttributeProvider) + .Concat(GetAttrs(ParameterInfo)) + .Concat(GetAttrs(TypeInfo.Type)) + .Cast(); + + object[] GetAttrs(ICustomAttributeProvider? provider) => + provider?.GetCustomAttributes(type, inherit) ?? Array.Empty(); + } +} diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.ReflectionHelpers.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.ReflectionHelpers.cs deleted file mode 100644 index 11dc0c6d85b7..000000000000 --- a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.ReflectionHelpers.cs +++ /dev/null @@ -1,407 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using System.Text.Json.Serialization.Metadata; - -namespace JsonSchemaMapper; - -#if EXPOSE_JSON_SCHEMA_MAPPER - public -#else -internal -#endif -static partial class JsonSchemaMapper -{ - // Uses reflection to determine the element type of an enumerable or dictionary type - // Workaround for https://github.com/dotnet/runtime/issues/77306#issuecomment-2007887560 - private static Type GetElementType(JsonTypeInfo typeInfo) - { - Debug.Assert(typeInfo.Kind is JsonTypeInfoKind.Enumerable or JsonTypeInfoKind.Dictionary); - return (Type)typeof(JsonTypeInfo).GetProperty("ElementType", BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)?.GetValue(typeInfo)!; - } - - // The source generator currently doesn't populate attribute providers for properties - // cf. 
https://github.com/dotnet/runtime/issues/100095 - // Work around the issue by running a query for the relevant MemberInfo using the internal MemberName property - // https://github.com/dotnet/runtime/blob/de774ff9ee1a2c06663ab35be34b755cd8d29731/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Metadata/JsonPropertyInfo.cs#L206 -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", - Justification = "We're reading the internal JsonPropertyInfo.MemberName which cannot have been trimmed away.")] -#endif - private static ICustomAttributeProvider? ResolveAttributeProvider(JsonTypeInfo typeInfo, JsonPropertyInfo propertyInfo) - { - if (propertyInfo.AttributeProvider is { } provider) - { - return provider; - } - - PropertyInfo memberNameProperty = typeof(JsonPropertyInfo).GetProperty("MemberName", BindingFlags.Instance | BindingFlags.NonPublic)!; - var memberName = (string?)memberNameProperty.GetValue(propertyInfo); - if (memberName is not null) - { - return typeInfo.Type.GetMember(memberName, MemberTypes.Property | MemberTypes.Field, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).FirstOrDefault(); - } - - return null; - } - - // Uses reflection to determine any custom converters specified for the element of a nullable type. -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2026", - Justification = "We're resolving private fields of the built-in Nullable converter which cannot have been trimmed away.")] -#endif - private static JsonConverter? ExtractCustomNullableConverter(JsonConverter? 
converter) - { - Debug.Assert(converter is null || IsBuiltInConverter(converter)); - - // There is unfortunately no way in which we can obtain the element converter from a nullable converter without resorting to private reflection - // https://github.com/dotnet/runtime/blob/5fda47434cecc590095e9aef3c4e560b7b7ebb47/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Converters/Value/NullableConverter.cs#L15-L17 - Type? converterType = converter?.GetType(); - if (converterType?.Name == "NullableConverter`1") - { - FieldInfo elementConverterField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_elementConverter"); - return (JsonConverter)elementConverterField!.GetValue(converter)!; - } - - return null; - } - - // Uses reflection to determine serialization configuration for enum types - // cf. https://github.com/dotnet/runtime/blob/5fda47434cecc590095e9aef3c4e560b7b7ebb47/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Converters/Value/EnumConverter.cs#L23-L25 -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2026", - Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] -#endif - private static bool TryGetStringEnumConverterValues(JsonTypeInfo typeInfo, JsonConverter converter, out JsonArray? 
values) - { - Debug.Assert(typeInfo.Type.IsEnum && IsBuiltInConverter(converter)); - - if (converter is JsonConverterFactory factory) - { - converter = factory.CreateConverter(typeInfo.Type, typeInfo.Options)!; - } - - Type converterType = converter.GetType(); - FieldInfo converterOptionsField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_converterOptions"); - FieldInfo namingPolicyField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_namingPolicy"); - - const int EnumConverterOptionsAllowStrings = 1; - var converterOptions = (int)converterOptionsField!.GetValue(converter)!; - if ((converterOptions & EnumConverterOptionsAllowStrings) != 0) - { - if (typeInfo.Type.GetCustomAttribute() is not null) - { - // For enums implemented as flags do not surface values in the JSON schema. - values = null; - } - else - { - var namingPolicy = (JsonNamingPolicy?)namingPolicyField!.GetValue(converter)!; - string[] names = Enum.GetNames(typeInfo.Type); - values = []; - foreach (string name in names) - { - string effectiveName = namingPolicy?.ConvertName(name) ?? name; - values.Add((JsonNode)effectiveName); - } - } - - return true; - } - - values = null; - return false; - } - -#if NETCOREAPP - [RequiresUnreferencedCode("Resolves unreferenced member metadata.")] -#endif - private static FieldInfo GetPrivateFieldWithPotentiallyTrimmedMetadata(this Type type, string fieldName) => - type.GetField(fieldName, BindingFlags.Instance | BindingFlags.NonPublic) ?? - throw new InvalidOperationException( - $"Could not resolve metadata for field '{fieldName}' in type '{type}'. " + - "If running Native AOT ensure that the 'IlcTrimMetadata' property has been disabled."); - - // Resolves the parameters of the deserialization constructor for a type, if they exist. -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2072:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. 
The return value of the source method does not have matching annotations.", - Justification = "The deserialization constructor should have already been referenced by the source generator and therefore will not have been trimmed.")] -#endif - private static Func ResolveJsonConstructorParameterMapper(JsonTypeInfo typeInfo) - { - Debug.Assert(typeInfo.Kind is JsonTypeInfoKind.Object); - - if (typeInfo.Properties.Count > 0 && - typeInfo.CreateObject is null && // Ensure that a default constructor isn't being used - typeInfo.Type.TryGetDeserializationConstructor(useDefaultCtorInAnnotatedStructs: true, out ConstructorInfo? ctor)) - { - ParameterInfo[]? parameters = ctor?.GetParameters(); - if (parameters?.Length > 0) - { - Dictionary dict = new(parameters.Length); - foreach (ParameterInfo parameter in parameters) - { - if (parameter.Name is not null) - { - // We don't care about null parameter names or conflicts since they - // would have already been rejected by JsonTypeInfo configuration. - dict[new(parameter.Name, parameter.ParameterType)] = parameter; - } - } - - return prop => dict.TryGetValue(new(prop.Name, prop.PropertyType), out ParameterInfo? parameter) ? parameter : null; - } - } - - return static _ => null; - } - - // Parameter to property matching semantics as declared in - // https://github.com/dotnet/runtime/blob/12d96ccfaed98e23c345188ee08f8cfe211c03e7/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Metadata/JsonTypeInfo.cs#L1007-L1030 - private readonly struct ParameterLookupKey : IEquatable - { - public ParameterLookupKey(string name, Type type) - { - Name = name; - Type = type; - } - - public string Name { get; } - public Type Type { get; } - - public override int GetHashCode() => StringComparer.OrdinalIgnoreCase.GetHashCode(Name); - public bool Equals(ParameterLookupKey other) => Type == other.Type && string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase); - public override bool Equals(object? 
obj) => obj is ParameterLookupKey key && Equals(key); - } - - // Resolves the deserialization constructor for a type using logic copied from - // https://github.com/dotnet/runtime/blob/e12e2fa6cbdd1f4b0c8ad1b1e2d960a480c21703/src/libraries/System.Text.Json/Common/ReflectionExtensions.cs#L227-L286 - private static bool TryGetDeserializationConstructor( -#if NETCOREAPP - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.NonPublicConstructors)] -#endif - this Type type, - bool useDefaultCtorInAnnotatedStructs, - out ConstructorInfo? deserializationCtor) - { - ConstructorInfo? ctorWithAttribute = null; - ConstructorInfo? publicParameterlessCtor = null; - ConstructorInfo? lonePublicCtor = null; - - ConstructorInfo[] constructors = type.GetConstructors(BindingFlags.Public | BindingFlags.Instance); - - if (constructors.Length == 1) - { - lonePublicCtor = constructors[0]; - } - - foreach (ConstructorInfo constructor in constructors) - { - if (HasJsonConstructorAttribute(constructor)) - { - if (ctorWithAttribute is not null) - { - deserializationCtor = null; - return false; - } - - ctorWithAttribute = constructor; - } - else if (constructor.GetParameters().Length == 0) - { - publicParameterlessCtor = constructor; - } - } - - // Search for non-public ctors with [JsonConstructor]. - foreach (ConstructorInfo constructor in type.GetConstructors(BindingFlags.NonPublic | BindingFlags.Instance)) - { - if (HasJsonConstructorAttribute(constructor)) - { - if (ctorWithAttribute is not null) - { - deserializationCtor = null; - return false; - } - - ctorWithAttribute = constructor; - } - } - - // Structs will use default constructor if attribute isn't used. - if (useDefaultCtorInAnnotatedStructs && type.IsValueType && ctorWithAttribute is null) - { - deserializationCtor = null; - return true; - } - - deserializationCtor = ctorWithAttribute ?? publicParameterlessCtor ?? 
lonePublicCtor; - return true; - - static bool HasJsonConstructorAttribute(ConstructorInfo constructorInfo) => - constructorInfo.GetCustomAttribute() is not null; - } - - private static bool IsBuiltInConverter(JsonConverter converter) => - converter.GetType().Assembly == typeof(JsonConverter).Assembly; - - // Resolves the nullable reference type annotations for a property or field, - // additionally addressing a few known bugs of the NullabilityInfo pre .NET 9. - private static NullabilityInfo GetMemberNullability(this NullabilityInfoContext context, MemberInfo memberInfo) - { - Debug.Assert(memberInfo is PropertyInfo or FieldInfo); - return memberInfo is PropertyInfo prop - ? context.Create(prop) - : context.Create((FieldInfo)memberInfo); - } - - private static NullabilityState GetParameterNullability(this NullabilityInfoContext context, ParameterInfo parameterInfo) - { - // Workaround for https://github.com/dotnet/runtime/issues/92487 - if (parameterInfo.GetGenericParameterDefinition() is { ParameterType: { IsGenericParameter: true } typeParam }) - { - // Step 1. Look for nullable annotations on the type parameter. - if (GetNullableFlags(typeParam) is byte[] flags) - { - return TranslateByte(flags[0]); - } - - // Step 2. Look for nullable annotations on the generic method declaration. - if (typeParam.DeclaringMethod is not null && GetNullableContextFlag(typeParam.DeclaringMethod) is byte flag) - { - return TranslateByte(flag); - } - - // Step 3. Look for nullable annotations on the generic method declaration. - if (GetNullableContextFlag(typeParam.DeclaringType!) is byte flag2) - { - return TranslateByte(flag2); - } - - // Default to nullable. - return NullabilityState.Nullable; - -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. 
The return value of the source method does not have matching annotations.", - Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] -#endif - static byte[]? GetNullableFlags(MemberInfo member) - { - Attribute? attr = member.GetCustomAttributes().FirstOrDefault(attr => - { - Type attrType = attr.GetType(); - return attrType.Namespace == "System.Runtime.CompilerServices" && attrType.Name == "NullableAttribute"; - }); - - return (byte[])attr?.GetType().GetField("NullableFlags")?.GetValue(attr)!; - } - -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", - Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] -#endif - static byte? GetNullableContextFlag(MemberInfo member) - { - Attribute? 
attr = member.GetCustomAttributes().FirstOrDefault(attr => - { - Type attrType = attr.GetType(); - return attrType.Namespace == "System.Runtime.CompilerServices" && attrType.Name == "NullableContextAttribute"; - }); - - return (byte?)attr?.GetType().GetField("Flag")?.GetValue(attr)!; - } - - static NullabilityState TranslateByte(byte b) => - b switch - { - 1 => NullabilityState.NotNull, - 2 => NullabilityState.Nullable, - _ => NullabilityState.Unknown - }; - } - - return context.Create(parameterInfo).WriteState; - } - - private static ParameterInfo GetGenericParameterDefinition(this ParameterInfo parameter) - { - if (parameter.Member is { DeclaringType.IsConstructedGenericType: true } - or MethodInfo { IsGenericMethod: true, IsGenericMethodDefinition: false }) - { - var genericMethod = (MethodBase)parameter.Member.GetGenericMemberDefinition()!; - return genericMethod.GetParameters()[parameter.Position]; - } - - return parameter; - } - -#if NETCOREAPP - [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", - Justification = "Looking up the generic member definition of the provided member.")] -#endif - private static MemberInfo GetGenericMemberDefinition(this MemberInfo member) - { - if (member is Type type) - { - return type.IsConstructedGenericType ? 
type.GetGenericTypeDefinition() : type; - } - - if (member.DeclaringType!.IsConstructedGenericType) - { - const BindingFlags AllMemberFlags = - BindingFlags.Static | BindingFlags.Instance | - BindingFlags.Public | BindingFlags.NonPublic; - - return member.DeclaringType.GetGenericTypeDefinition() - .GetMember(member.Name, AllMemberFlags) - .First(m => m.MetadataToken == member.MetadataToken); - } - - if (member is MethodInfo { IsGenericMethod: true, IsGenericMethodDefinition: false } method) - { - return method.GetGenericMethodDefinition(); - } - - return member; - } - - // Taken from https://github.com/dotnet/runtime/blob/903bc019427ca07080530751151ea636168ad334/src/libraries/System.Text.Json/Common/ReflectionExtensions.cs#L288-L317 - private static object? GetNormalizedDefaultValue(this ParameterInfo parameterInfo) - { - Type parameterType = parameterInfo.ParameterType; - object? defaultValue = parameterInfo.DefaultValue; - - if (defaultValue is null) - { - return null; - } - - // DBNull.Value is sometimes used as the default value (returned by reflection) of nullable params in place of null. - if (defaultValue == DBNull.Value && parameterType != typeof(DBNull)) - { - return null; - } - - // Default values of enums or nullable enums are represented using the underlying type and need to be cast explicitly - // cf. 
https://github.com/dotnet/runtime/issues/68647 - if (parameterType.IsEnum) - { - return Enum.ToObject(parameterType, defaultValue); - } - - if (Nullable.GetUnderlyingType(parameterType) is Type underlyingType && underlyingType.IsEnum) - { - return Enum.ToObject(underlyingType, defaultValue); - } - - return defaultValue; - } -} diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.JsonSchema.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.JsonSchema.cs new file mode 100644 index 000000000000..5eeb37b61fee --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.JsonSchema.cs @@ -0,0 +1,563 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. + +#if !NET9_0_OR_GREATER && !SYSTEM_TEXT_JSON_V9 +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Text.Json.Nodes; + +namespace JsonSchemaMapper; + +#if EXPOSE_JSON_SCHEMA_MAPPER +public +#else +internal +#endif + static partial class JsonSchemaMapper +{ + // Simple JSON schema representation taken from System.Text.Json + // https://github.com/dotnet/runtime/blob/50d6cad649aad2bfa4069268eddd16fd51ec5cf3/src/libraries/System.Text.Json/src/System/Text/Json/Schema/JsonSchema.cs + private sealed class JsonSchema + { + public static JsonSchema False { get; } = new(false); + public static JsonSchema True { get; } = new(true); + + public JsonSchema() + { + } + + private JsonSchema(bool trueOrFalse) + { + _trueOrFalse = trueOrFalse; + } + + public bool IsTrue => _trueOrFalse is true; + public bool IsFalse => _trueOrFalse is false; + private readonly bool? _trueOrFalse; + + public string? 
Schema + { + get => _schema; + set + { + VerifyMutable(); + _schema = value; + } + } + + private string? _schema; + + public string? Title + { + get => _title; + set + { + VerifyMutable(); + _title = value; + } + } + + private string? _title; + + public string? Description + { + get => _description; + set + { + VerifyMutable(); + _description = value; + } + } + + private string? _description; + + public string? Ref + { + get => _ref; + set + { + VerifyMutable(); + _ref = value; + } + } + + private string? _ref; + + public string? Comment + { + get => _comment; + set + { + VerifyMutable(); + _comment = value; + } + } + + private string? _comment; + + public JsonSchemaType Type + { + get => _type; + set + { + VerifyMutable(); + _type = value; + } + } + + private JsonSchemaType _type = JsonSchemaType.Any; + + public string? Format + { + get => _format; + set + { + VerifyMutable(); + _format = value; + } + } + + private string? _format; + + public string? Pattern + { + get => _pattern; + set + { + VerifyMutable(); + _pattern = value; + } + } + + private string? _pattern; + + public JsonNode? Constant + { + get => _constant; + set + { + VerifyMutable(); + _constant = value; + } + } + + private JsonNode? _constant; + + public List>? Properties + { + get => _properties; + set + { + VerifyMutable(); + _properties = value; + } + } + + private List>? _properties; + + public List? Required + { + get => _required; + set + { + VerifyMutable(); + _required = value; + } + } + + private List? _required; + + public JsonSchema? Items + { + get => _items; + set + { + VerifyMutable(); + _items = value; + } + } + + private JsonSchema? _items; + + public JsonSchema? AdditionalProperties + { + get => _additionalProperties; + set + { + VerifyMutable(); + _additionalProperties = value; + } + } + + private JsonSchema? _additionalProperties; + + public JsonArray? Enum + { + get => _enum; + set + { + VerifyMutable(); + _enum = value; + } + } + + private JsonArray? 
_enum; + + public JsonSchema? Not + { + get => _not; + set + { + VerifyMutable(); + _not = value; + } + } + + private JsonSchema? _not; + + public List? AnyOf + { + get => _anyOf; + set + { + VerifyMutable(); + _anyOf = value; + } + } + + private List? _anyOf; + + public bool HasDefaultValue + { + get => _hasDefaultValue; + set + { + VerifyMutable(); + _hasDefaultValue = value; + } + } + + private bool _hasDefaultValue; + + public JsonNode? DefaultValue + { + get => _defaultValue; + set + { + VerifyMutable(); + _defaultValue = value; + } + } + + private JsonNode? _defaultValue; + + public int? MinLength + { + get => _minLength; + set + { + VerifyMutable(); + _minLength = value; + } + } + + private int? _minLength; + + public int? MaxLength + { + get => _maxLength; + set + { + VerifyMutable(); + _maxLength = value; + } + } + + private int? _maxLength; + + public JsonSchemaGenerationContext? GenerationContext { get; set; } + + public int KeywordCount + { + get + { + if (_trueOrFalse != null) + { + return 0; + } + + int count = 0; + Count(Schema != null); + Count(Ref != null); + Count(Comment != null); + Count(Title != null); + Count(Description != null); + Count(Type != JsonSchemaType.Any); + Count(Format != null); + Count(Pattern != null); + Count(Constant != null); + Count(Properties != null); + Count(Required != null); + Count(Items != null); + Count(AdditionalProperties != null); + Count(Enum != null); + Count(Not != null); + Count(AnyOf != null); + Count(HasDefaultValue); + Count(MinLength != null); + Count(MaxLength != null); + + return count; + + void Count(bool isKeywordSpecified) + { + count += isKeywordSpecified ? 
1 : 0; + } + } + } + + public void MakeNullable() + { + if (_trueOrFalse != null) + { + return; + } + + if (Type != JsonSchemaType.Any) + { + Type |= JsonSchemaType.Null; + } + } + + public JsonNode ToJsonNode(JsonSchemaMapperConfiguration options) + { + if (_trueOrFalse is { } boolSchema) + { + return CompleteSchema((JsonNode)boolSchema); + } + + var objSchema = new JsonObject(); + + if (Schema != null) + { + objSchema.Add(JsonSchemaConstants.SchemaPropertyName, Schema); + } + + if (Title != null) + { + objSchema.Add(JsonSchemaConstants.TitlePropertyName, Title); + } + + if (Description != null) + { + objSchema.Add(JsonSchemaConstants.DescriptionPropertyName, Description); + } + + if (Ref != null) + { + objSchema.Add(JsonSchemaConstants.RefPropertyName, Ref); + } + + if (Comment != null) + { + objSchema.Add(JsonSchemaConstants.CommentPropertyName, Comment); + } + + if (MapSchemaType(Type) is JsonNode type) + { + objSchema.Add(JsonSchemaConstants.TypePropertyName, type); + } + + if (Format != null) + { + objSchema.Add(JsonSchemaConstants.FormatPropertyName, Format); + } + + if (Pattern != null) + { + objSchema.Add(JsonSchemaConstants.PatternPropertyName, Pattern); + } + + if (Constant != null) + { + objSchema.Add(JsonSchemaConstants.ConstPropertyName, Constant); + } + + if (Properties != null) + { + var properties = new JsonObject(); + foreach (KeyValuePair property in Properties) + { + properties.Add(property.Key, property.Value.ToJsonNode(options)); + } + + objSchema.Add(JsonSchemaConstants.PropertiesPropertyName, properties); + } + + if (Required != null) + { + var requiredArray = new JsonArray(); + foreach (string requiredProperty in Required) + { + requiredArray.Add((JsonNode)requiredProperty); + } + + objSchema.Add(JsonSchemaConstants.RequiredPropertyName, requiredArray); + } + + if (Items != null) + { + objSchema.Add(JsonSchemaConstants.ItemsPropertyName, Items.ToJsonNode(options)); + } + + if (AdditionalProperties != null) + { + 
objSchema.Add(JsonSchemaConstants.AdditionalPropertiesPropertyName, AdditionalProperties.ToJsonNode(options)); + } + + if (Enum != null) + { + objSchema.Add(JsonSchemaConstants.EnumPropertyName, Enum); + } + + if (Not != null) + { + objSchema.Add(JsonSchemaConstants.NotPropertyName, Not.ToJsonNode(options)); + } + + if (AnyOf != null) + { + JsonArray anyOfArray = new(); + foreach (JsonSchema schema in AnyOf) + { + anyOfArray.Add(schema.ToJsonNode(options)); + } + + objSchema.Add(JsonSchemaConstants.AnyOfPropertyName, anyOfArray); + } + + if (HasDefaultValue) + { + objSchema.Add(JsonSchemaConstants.DefaultPropertyName, DefaultValue); + } + + if (MinLength is int minLength) + { + objSchema.Add(JsonSchemaConstants.MinLengthPropertyName, (JsonNode)minLength); + } + + if (MaxLength is int maxLength) + { + objSchema.Add(JsonSchemaConstants.MaxLengthPropertyName, (JsonNode)maxLength); + } + + return CompleteSchema(objSchema); + + JsonNode CompleteSchema(JsonNode schema) + { + if (GenerationContext is { } context) + { + Debug.Assert(options.TransformSchemaNode != null, "context should only be populated if a callback is present."); + + // Apply any user-defined transformations to the schema. 
+ return options.TransformSchemaNode!(context, schema); + } + + return schema; + } + } + + public static void EnsureMutable(ref JsonSchema schema) + { + switch (schema._trueOrFalse) + { + case false: + schema = new JsonSchema { Not = JsonSchema.True }; + break; + case true: + schema = new JsonSchema(); + break; + } + } + + private static readonly JsonSchemaType[] s_schemaValues = new JsonSchemaType[] + { + // NB the order of these values influences order of types in the rendered schema + JsonSchemaType.String, + JsonSchemaType.Integer, + JsonSchemaType.Number, + JsonSchemaType.Boolean, + JsonSchemaType.Array, + JsonSchemaType.Object, + JsonSchemaType.Null, + }; + + private void VerifyMutable() + { + Debug.Assert(_trueOrFalse is null, "Schema is not mutable"); + if (_trueOrFalse is not null) + { + Throw(); + static void Throw() => throw new InvalidOperationException(); + } + } + + private static JsonNode? MapSchemaType(JsonSchemaType schemaType) + { + if (schemaType is JsonSchemaType.Any) + { + return null; + } + + if (ToIdentifier(schemaType) is string identifier) + { + return identifier; + } + + var array = new JsonArray(); + foreach (JsonSchemaType type in s_schemaValues) + { + if ((schemaType & type) != 0) + { + array.Add((JsonNode)ToIdentifier(type)!); + } + } + + return array; + + static string? 
ToIdentifier(JsonSchemaType schemaType) + { + return schemaType switch + { + JsonSchemaType.Null => "null", + JsonSchemaType.Boolean => "boolean", + JsonSchemaType.Integer => "integer", + JsonSchemaType.Number => "number", + JsonSchemaType.String => "string", + JsonSchemaType.Array => "array", + JsonSchemaType.Object => "object", + _ => null, + }; + } + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + private enum JsonSchemaType + { + Any = 0, // No type declared on the schema + Null = 1, + Boolean = 2, + Integer = 4, + Number = 8, + String = 16, + Array = 32, + Object = 64, + } +} +#endif diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.cs new file mode 100644 index 000000000000..a74e64b94403 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv8.cs @@ -0,0 +1,959 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. + +#if !NET9_0_OR_GREATER && !SYSTEM_TEXT_JSON_V9 +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; +using System.Threading.Tasks; + +namespace JsonSchemaMapper; + +#if EXPOSE_JSON_SCHEMA_MAPPER +public +#else +internal +#endif + static partial class JsonSchemaMapper +{ + // For System.Text.Json versions prior to v9, JsonSchemaMapper is implemented as a standalone component. 
+ // The implementation uses private reflection to access metadata not available with the older APIs of STJ. + // While the implementation is forward compatible with .NET 9, it is not guaranteed that it will work with + // later versions of .NET and users are encouraged to switch to the built-in JsonSchemaExporter eventually. + + private static partial JsonNode MapRootTypeJsonSchema(JsonTypeInfo typeInfo, JsonSchemaMapperConfiguration configuration) + { + GenerationState state = new(configuration, typeInfo.Options); + JsonSchema schema = MapJsonSchemaCore(ref state, typeInfo); + return schema.ToJsonNode(configuration); + } + + private static partial JsonNode MapMethodParameterJsonSchema( + ParameterInfo parameterInfo, + JsonTypeInfo parameterTypeInfo, + JsonSchemaMapperConfiguration configuration, + NullabilityInfoContext nullabilityContext, + out bool isRequired) + { + Debug.Assert(parameterInfo.Name != null); + + GenerationState state = new(configuration, parameterTypeInfo.Options, nullabilityContext); + + string? parameterDescription = null; + isRequired = false; + + ResolveParameterInfo( + parameterInfo, + parameterTypeInfo, + state.NullabilityInfoContext, + state.Configuration, + out bool hasDefaultValue, + out JsonNode? 
defaultValue, + out bool isNonNullableType, + ref parameterDescription, + ref isRequired); + + state.PushSchemaNode(JsonSchemaConstants.PropertiesPropertyName); + state.PushSchemaNode(parameterInfo.Name!); + + JsonSchema paramSchema = MapJsonSchemaCore( + ref state, + parameterTypeInfo, + parameterInfo: parameterInfo, + description: parameterDescription, + isNonNullableType: isNonNullableType); + + if (hasDefaultValue) + { + JsonSchema.EnsureMutable(ref paramSchema); + paramSchema.DefaultValue = defaultValue; + paramSchema.HasDefaultValue = true; + } + + state.PopSchemaNode(); + state.PopSchemaNode(); + + return paramSchema.ToJsonNode(configuration); + } + + private static JsonSchema MapJsonSchemaCore( + ref GenerationState state, + JsonTypeInfo typeInfo, + Type? parentType = null, + JsonPropertyInfo? propertyInfo = null, + ICustomAttributeProvider? propertyAttributeProvider = null, + ParameterInfo? parameterInfo = null, + bool isNonNullableType = false, + JsonConverter? customConverter = null, + JsonNumberHandling? customNumberHandling = null, + JsonTypeInfo? parentPolymorphicTypeInfo = null, + bool parentPolymorphicTypeContainsTypesWithoutDiscriminator = false, + bool parentPolymorphicTypeIsNonNullable = false, + KeyValuePair? typeDiscriminator = null, + string? description = null, + bool cacheResult = true) + { + Debug.Assert(typeInfo.IsReadOnly); + + if (cacheResult && state.TryPushType(typeInfo, propertyInfo, out string? existingJsonPointer)) + { + // We're generating the schema of a recursive type, return a reference pointing to the outermost schema. + return CompleteSchema(ref state, new JsonSchema { Ref = existingJsonPointer }); + } + + JsonSchema schema; + JsonConverter effectiveConverter = customConverter ?? typeInfo.Converter; + JsonNumberHandling effectiveNumberHandling = customNumberHandling ?? typeInfo.NumberHandling ?? 
typeInfo.Options.NumberHandling; + + if (!IsBuiltInConverter(effectiveConverter)) + { + // Return a `true` schema for types with user-defined converters. + return CompleteSchema(ref state, JsonSchema.True); + } + + if (state.Configuration.ResolveDescriptionAttributes) + { + description ??= typeInfo.Type.GetCustomAttribute()?.Description; + } + + if (parentPolymorphicTypeInfo is null && typeInfo.PolymorphismOptions is { DerivedTypes.Count: > 0 } polyOptions) + { + // This is the base type of a polymorphic type hierarchy. The schema for this type + // will include an "anyOf" property with the schemas for all derived types. + + string typeDiscriminatorKey = polyOptions.TypeDiscriminatorPropertyName; + List derivedTypes = polyOptions.DerivedTypes.ToList(); + + if (!typeInfo.Type.IsAbstract && !derivedTypes.Any(derived => derived.DerivedType == typeInfo.Type)) + { + // For non-abstract base types that haven't been explicitly configured, + // add a trivial schema to the derived types since we should support it. + derivedTypes.Add(new JsonDerivedType(typeInfo.Type)); + } + + bool containsTypesWithoutDiscriminator = derivedTypes.Exists(static derivedTypes => derivedTypes.TypeDiscriminator is null); + JsonSchemaType schemaType = JsonSchemaType.Any; + List? anyOf = new(derivedTypes.Count); + + state.PushSchemaNode(JsonSchemaConstants.AnyOfPropertyName); + + foreach (JsonDerivedType derivedType in derivedTypes) + { + Debug.Assert(derivedType.TypeDiscriminator is null or int or string); + + KeyValuePair? 
derivedTypeDiscriminator = null; + if (derivedType.TypeDiscriminator is { } discriminatorValue) + { + JsonNode discriminatorNode = discriminatorValue switch + { + string stringId => (JsonNode)stringId, + _ => (JsonNode)(int)discriminatorValue, + }; + + JsonSchema discriminatorSchema = new() { Constant = discriminatorNode }; + derivedTypeDiscriminator = new(typeDiscriminatorKey, discriminatorSchema); + } + + JsonTypeInfo derivedTypeInfo = typeInfo.Options.GetTypeInfo(derivedType.DerivedType); + + state.PushSchemaNode(anyOf.Count.ToString(CultureInfo.InvariantCulture)); + JsonSchema derivedSchema = MapJsonSchemaCore( + ref state, + derivedTypeInfo, + parentPolymorphicTypeInfo: typeInfo, + typeDiscriminator: derivedTypeDiscriminator, + parentPolymorphicTypeContainsTypesWithoutDiscriminator: containsTypesWithoutDiscriminator, + parentPolymorphicTypeIsNonNullable: isNonNullableType, + cacheResult: false); + + state.PopSchemaNode(); + + // Determine if all derived schemas have the same type. + if (anyOf.Count == 0) + { + schemaType = derivedSchema.Type; + } + else if (schemaType != derivedSchema.Type) + { + schemaType = JsonSchemaType.Any; + } + + anyOf.Add(derivedSchema); + } + + state.PopSchemaNode(); + + if (schemaType is not JsonSchemaType.Any) + { + // If all derived types have the same schema type, we can simplify the schema + // by moving the type keyword to the base schema and removing it from the derived schemas. + foreach (JsonSchema derivedSchema in anyOf) + { + derivedSchema.Type = JsonSchemaType.Any; + + if (derivedSchema.KeywordCount == 0) + { + // if removing the type results in an empty schema, + // remove the anyOf array entirely since it's always true. + anyOf = null; + break; + } + } + } + + schema = new() + { + Type = schemaType, + AnyOf = anyOf, + + // If all derived types have a discriminator, we can require it in the base schema. + Required = containsTypesWithoutDiscriminator ? 
null : new() { typeDiscriminatorKey }, + }; + + return CompleteSchema(ref state, schema); + } + + if (Nullable.GetUnderlyingType(typeInfo.Type) is Type nullableElementType) + { + JsonTypeInfo elementTypeInfo = typeInfo.Options.GetTypeInfo(nullableElementType); + customConverter = ExtractCustomNullableConverter(customConverter); + schema = MapJsonSchemaCore(ref state, elementTypeInfo, customConverter: customConverter, cacheResult: false); + + if (schema.Enum != null) + { + Debug.Assert(elementTypeInfo.Type.IsEnum, "The enum keyword should only be populated by schemas for enum types."); + schema.Enum.Add(null); // Append null to the enum array. + } + + return CompleteSchema(ref state, schema); + } + + switch (typeInfo.Kind) + { + case JsonTypeInfoKind.Object: + List>? properties = null; + List? required = null; + JsonSchema? additionalProperties = null; + + JsonUnmappedMemberHandling effectiveUnmappedMemberHandling = typeInfo.UnmappedMemberHandling ?? typeInfo.Options.UnmappedMemberHandling; + if (effectiveUnmappedMemberHandling is JsonUnmappedMemberHandling.Disallow) + { + // Disallow unspecified properties. + additionalProperties = JsonSchema.False; + } + + if (typeDiscriminator is { } typeDiscriminatorPair) + { + (properties ??= new()).Add(typeDiscriminatorPair); + if (parentPolymorphicTypeContainsTypesWithoutDiscriminator) + { + // Require the discriminator here since it's not common to all derived types. + (required ??= new()).Add(typeDiscriminatorPair.Key); + } + } + + Func? parameterInfoMapper = ResolveJsonConstructorParameterMapper(typeInfo); + + state.PushSchemaNode(JsonSchemaConstants.PropertiesPropertyName); + foreach (JsonPropertyInfo property in typeInfo.Properties) + { + if (property is { Get: null, Set: null } or { IsExtensionData: true }) + { + continue; // Skip JsonIgnored properties and extension data + } + + JsonNumberHandling? propertyNumberHandling = property.NumberHandling ?? 
effectiveNumberHandling; + JsonTypeInfo propertyTypeInfo = typeInfo.Options.GetTypeInfo(property.PropertyType); + + // Resolve the attribute provider for the property. + ICustomAttributeProvider? attributeProvider = ResolveAttributeProvider(typeInfo.Type, property); + + // Resolve property-level description attributes. + string? propertyDescription = state.Configuration.ResolveDescriptionAttributes + ? attributeProvider?.GetCustomAttributes(inherit: true).OfType().FirstOrDefault()?.Description + : null; + + // Declare the property as nullable if either getter or setter are nullable. + bool isNonNullableProperty = false; + if (attributeProvider is MemberInfo memberInfo) + { + NullabilityInfo nullabilityInfo = state.NullabilityInfoContext.GetMemberNullability(memberInfo); + isNonNullableProperty = + (property.Get is null || nullabilityInfo.ReadState is NullabilityState.NotNull) && + (property.Set is null || nullabilityInfo.WriteState is NullabilityState.NotNull); + } + + bool isRequired = property.IsRequired; + bool hasDefaultValue = false; + JsonNode? defaultValue = null; + + ParameterInfo? 
associatedParameter = parameterInfoMapper?.Invoke(property); + if (associatedParameter != null) + { + ResolveParameterInfo( + associatedParameter, + propertyTypeInfo, + state.NullabilityInfoContext, + state.Configuration, + out hasDefaultValue, + out defaultValue, + out bool isNonNullableParameter, + ref propertyDescription, + ref isRequired); + + isNonNullableProperty &= isNonNullableParameter; + } + + state.PushSchemaNode(property.Name); + JsonSchema propertySchema = MapJsonSchemaCore( + ref state, + propertyTypeInfo, + parentType: typeInfo.Type, + propertyInfo: property, + parameterInfo: associatedParameter, + propertyAttributeProvider: attributeProvider, + isNonNullableType: isNonNullableProperty, + description: propertyDescription, + customConverter: property.CustomConverter, + customNumberHandling: propertyNumberHandling); + + state.PopSchemaNode(); + + if (hasDefaultValue) + { + JsonSchema.EnsureMutable(ref propertySchema); + propertySchema.DefaultValue = defaultValue; + propertySchema.HasDefaultValue = true; + } + + (properties ??= new()).Add(new(property.Name, propertySchema)); + + if (isRequired) + { + (required ??= new()).Add(property.Name); + } + } + + state.PopSchemaNode(); + return CompleteSchema(ref state, new() + { + Type = JsonSchemaType.Object, + Properties = properties, + Required = required, + AdditionalProperties = additionalProperties, + }); + + case JsonTypeInfoKind.Enumerable: + Type elementType = GetElementType(typeInfo); + JsonTypeInfo elementTypeInfo = typeInfo.Options.GetTypeInfo(elementType); + + if (typeDiscriminator is null) + { + state.PushSchemaNode(JsonSchemaConstants.ItemsPropertyName); + JsonSchema items = MapJsonSchemaCore(ref state, elementTypeInfo, customNumberHandling: effectiveNumberHandling); + state.PopSchemaNode(); + + return CompleteSchema(ref state, new() + { + Type = JsonSchemaType.Array, + Items = items.IsTrue ? 
null : items, + }); + } + else + { + // Polymorphic enumerable types are represented using a wrapping object: + // { "$type" : "discriminator", "$values" : [element1, element2, ...] } + // Which corresponds to the schema + // { "properties" : { "$type" : { "const" : "discriminator" }, "$values" : { "type" : "array", "items" : { ... } } } } + const string ValuesKeyword = "$values"; + + state.PushSchemaNode(JsonSchemaConstants.PropertiesPropertyName); + state.PushSchemaNode(ValuesKeyword); + state.PushSchemaNode(JsonSchemaConstants.ItemsPropertyName); + + JsonSchema items = MapJsonSchemaCore(ref state, elementTypeInfo, customNumberHandling: effectiveNumberHandling); + + state.PopSchemaNode(); + state.PopSchemaNode(); + state.PopSchemaNode(); + + return CompleteSchema(ref state, new() + { + Type = JsonSchemaType.Object, + Properties = new() + { + typeDiscriminator.Value, + new(ValuesKeyword, + new JsonSchema() + { + Type = JsonSchemaType.Array, + Items = items.IsTrue ? null : items, + }), + }, + Required = parentPolymorphicTypeContainsTypesWithoutDiscriminator ? new() { typeDiscriminator.Value.Key } : null, + }); + } + + case JsonTypeInfoKind.Dictionary: + Type valueType = GetElementType(typeInfo); + JsonTypeInfo valueTypeInfo = typeInfo.Options.GetTypeInfo(valueType); + + List>? dictProps = null; + List? dictRequired = null; + + if (typeDiscriminator is { } dictDiscriminator) + { + dictProps = new() { dictDiscriminator }; + if (parentPolymorphicTypeContainsTypesWithoutDiscriminator) + { + // Require the discriminator here since it's not common to all derived types. 
+ dictRequired = new() { dictDiscriminator.Key }; + } + } + + state.PushSchemaNode(JsonSchemaConstants.AdditionalPropertiesPropertyName); + JsonSchema valueSchema = MapJsonSchemaCore(ref state, valueTypeInfo, customNumberHandling: effectiveNumberHandling); + state.PopSchemaNode(); + + return CompleteSchema(ref state, new() + { + Type = JsonSchemaType.Object, + Properties = dictProps, + Required = dictRequired, + AdditionalProperties = valueSchema.IsTrue ? null : valueSchema, + }); + + default: + Debug.Assert(typeInfo.Kind is JsonTypeInfoKind.None); + + if (s_simpleTypeSchemaFactories.TryGetValue(typeInfo.Type, out Func? simpleTypeSchemaFactory)) + { + schema = simpleTypeSchemaFactory(effectiveNumberHandling); + } + else if (typeInfo.Type.IsEnum) + { + schema = GetEnumConverterSchema(typeInfo, effectiveConverter, state.Configuration); + } + else + { + schema = JsonSchema.True; + } + + return CompleteSchema(ref state, schema); + } + + JsonSchema CompleteSchema(ref GenerationState state, JsonSchema schema) + { + if (schema.Ref is null) + { + if (state.Configuration.IncludeSchemaVersion && state.CurrentDepth == 0) + { + JsonSchema.EnsureMutable(ref schema); + schema.Schema = SchemaVersion; + } + + if (description is not null) + { + JsonSchema.EnsureMutable(ref schema); + schema.Description = description; + } + + // A schema is marked as nullable if either + // 1. We have a schema for a property where either the getter or setter are marked as nullable. + // 2. We have a schema for a reference type, unless we're explicitly treating null-oblivious types as non-nullable. + bool isNullableSchema = (propertyInfo != null || parameterInfo != null) + ? 
!isNonNullableType + : CanBeNull(typeInfo.Type) && !parentPolymorphicTypeIsNonNullable && !state.Configuration.TreatNullObliviousAsNonNullable; + + if (isNullableSchema) + { + schema.MakeNullable(); + } + + if (cacheResult) + { + state.PopGeneratedType(); + } + } + + if (state.Configuration.TransformSchemaNode != null) + { + // Prime the schema for invocation by the JsonNode transformer. + schema.GenerationContext = new(typeInfo, parentType, propertyInfo, parameterInfo, propertyAttributeProvider); + } + + return schema; + } + } + + private readonly ref struct GenerationState + { + private readonly List _currentPath; + private readonly List<(JsonTypeInfo typeInfo, JsonPropertyInfo? propertyInfo, int depth)> _generationStack; + private readonly int _maxDepth; + + public GenerationState(JsonSchemaMapperConfiguration configuration, JsonSerializerOptions options, NullabilityInfoContext? nullabilityInfoContext = null) + { + Configuration = configuration; + NullabilityInfoContext = nullabilityInfoContext ?? new(); + _maxDepth = options.MaxDepth is 0 ? 64 : options.MaxDepth; + _generationStack = new(); + _currentPath = new(); + } + + public JsonSchemaMapperConfiguration Configuration { get; } + public NullabilityInfoContext NullabilityInfoContext { get; } + public int CurrentDepth => _currentPath.Count; + + public void PushSchemaNode(string nodeId) + { + if (CurrentDepth == _maxDepth) + { + ThrowHelpers.ThrowInvalidOperationException_MaxDepthReached(); + } + + _currentPath.Add(nodeId); + } + + public void PopSchemaNode() + { + _currentPath.RemoveAt(_currentPath.Count - 1); + } + + /// + /// Pushes the current type/property to the generation stack or returns a JSON pointer if the type is recursive. + /// + public bool TryPushType(JsonTypeInfo typeInfo, JsonPropertyInfo? propertyInfo, [NotNullWhen(true)] out string? existingJsonPointer) + { + foreach ((JsonTypeInfo otherTypeInfo, JsonPropertyInfo? 
otherPropertyInfo, int depth) in _generationStack) + { + if (typeInfo == otherTypeInfo && propertyInfo == otherPropertyInfo) + { + existingJsonPointer = FormatJsonPointer(_currentPath, depth); + return true; + } + } + + _generationStack.Add((typeInfo, propertyInfo, CurrentDepth)); + existingJsonPointer = null; + return false; + } + + public void PopGeneratedType() + { + Debug.Assert(_generationStack.Count > 0); + _generationStack.RemoveAt(_generationStack.Count - 1); + } + + private static string FormatJsonPointer(List currentPathList, int depth) + { + Debug.Assert(0 <= depth && depth < currentPathList.Count); + + if (depth == 0) + { + return "#"; + } + + StringBuilder sb = new(); + sb.Append('#'); + + for (int i = 0; i < depth; i++) + { + string segment = currentPathList[i]; + if (segment.AsSpan().IndexOfAny('~', '/') != -1) + { + segment = segment.Replace("~", "~0").Replace("/", "~1"); + } + + sb.Append('/'); + sb.Append(segment); + } + + return sb.ToString(); + } + } + + private static readonly Dictionary> s_simpleTypeSchemaFactories = new() + { + [typeof(object)] = _ => JsonSchema.True, + [typeof(bool)] = _ => new JsonSchema { Type = JsonSchemaType.Boolean }, + [typeof(byte)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(ushort)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(uint)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(ulong)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(sbyte)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(short)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(int)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(long)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, 
numberHandling), + [typeof(float)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Number, numberHandling, isIeeeFloatingPoint: true), + [typeof(double)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Number, numberHandling, isIeeeFloatingPoint: true), + [typeof(decimal)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Number, numberHandling), +#if NET6_0_OR_GREATER + [typeof(Half)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Number, numberHandling, isIeeeFloatingPoint: true), +#endif +#if NET7_0_OR_GREATER + [typeof(UInt128)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), + [typeof(Int128)] = numberHandling => GetSchemaForNumericType(JsonSchemaType.Integer, numberHandling), +#endif + [typeof(char)] = _ => new JsonSchema { Type = JsonSchemaType.String, MinLength = 1, MaxLength = 1 }, + [typeof(string)] = _ => new JsonSchema { Type = JsonSchemaType.String }, + [typeof(byte[])] = _ => new JsonSchema { Type = JsonSchemaType.String }, + [typeof(Memory)] = _ => new JsonSchema { Type = JsonSchemaType.String }, + [typeof(ReadOnlyMemory)] = _ => new JsonSchema { Type = JsonSchemaType.String }, + [typeof(DateTime)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "date-time" }, + [typeof(DateTimeOffset)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "date-time" }, + [typeof(TimeSpan)] = _ => new JsonSchema + { + Comment = "Represents a System.TimeSpan value.", + Type = JsonSchemaType.String, + Pattern = @"^-?(\d+\.)?\d{2}:\d{2}:\d{2}(\.\d{1,7})?$", + }, + +#if NET6_0_OR_GREATER + [typeof(DateOnly)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "date" }, + [typeof(TimeOnly)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "time" }, +#endif + [typeof(Guid)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "uuid" }, + [typeof(Uri)] = _ => new JsonSchema { Type = JsonSchemaType.String, Format = "uri" }, + 
[typeof(Version)] = _ => new JsonSchema + { + Comment = "Represents a version string.", + Type = JsonSchemaType.String, + Pattern = @"^\d+(\.\d+){1,3}$", + }, + + [typeof(JsonDocument)] = _ => new JsonSchema { Type = JsonSchemaType.Any }, + [typeof(JsonElement)] = _ => new JsonSchema { Type = JsonSchemaType.Any }, + [typeof(JsonNode)] = _ => new JsonSchema { Type = JsonSchemaType.Any }, + [typeof(JsonValue)] = _ => new JsonSchema { Type = JsonSchemaType.Any }, + [typeof(JsonObject)] = _ => new JsonSchema { Type = JsonSchemaType.Object }, + [typeof(JsonArray)] = _ => new JsonSchema { Type = JsonSchemaType.Array }, + }; + + // Adapted from https://github.com/dotnet/runtime/blob/d606c601510c1a1a28cb6ef3550f12db049c0776/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Converters/Value/JsonPrimitiveConverter.cs#L36-L69 + private static JsonSchema GetSchemaForNumericType(JsonSchemaType schemaType, JsonNumberHandling numberHandling, bool isIeeeFloatingPoint = false) + { + Debug.Assert(schemaType is JsonSchemaType.Integer or JsonSchemaType.Number); + Debug.Assert(!isIeeeFloatingPoint || schemaType is JsonSchemaType.Number); + + string? pattern = null; + + if ((numberHandling & (JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)) != 0) + { + pattern = schemaType is JsonSchemaType.Integer + ? @"^-?(?:0|[1-9]\d*)$" + : isIeeeFloatingPoint + ? 
@"^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$" + : @"^-?(?:0|[1-9]\d*)(?:\.\d+)?$"; + + schemaType |= JsonSchemaType.String; + } + + if (isIeeeFloatingPoint && (numberHandling & JsonNumberHandling.AllowNamedFloatingPointLiterals) != 0) + { + return new JsonSchema + { + AnyOf = new() + { + new JsonSchema { Type = schemaType, Pattern = pattern }, + new JsonSchema { Enum = new() { (JsonNode)"NaN", (JsonNode)"Infinity", (JsonNode)"-Infinity" } }, + }, + }; + } + + return new JsonSchema { Type = schemaType, Pattern = pattern }; + } + + // Uses reflection to determine the element type of an enumerable or dictionary type + // Workaround for https://github.com/dotnet/runtime/issues/77306#issuecomment-2007887560 + private static Type GetElementType(JsonTypeInfo typeInfo) + { + Debug.Assert(typeInfo.Kind is JsonTypeInfoKind.Enumerable or JsonTypeInfoKind.Dictionary); + s_elementTypeProperty ??= typeof(JsonTypeInfo).GetProperty("ElementType", BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + return (Type)s_elementTypeProperty?.GetValue(typeInfo)!; + } + + private static PropertyInfo? s_elementTypeProperty; + + // The source generator currently doesn't populate attribute providers for properties + // cf. https://github.com/dotnet/runtime/issues/100095 + // Work around the issue by running a query for the relevant MemberInfo using the internal MemberName property + // https://github.com/dotnet/runtime/blob/de774ff9ee1a2c06663ab35be34b755cd8d29731/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Metadata/JsonPropertyInfo.cs#L206 +#if NETCOREAPP + [EditorBrowsable(EditorBrowsableState.Never)] + [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. 
The return value of the source method does not have matching annotations.", + Justification = "We're reading the internal JsonPropertyInfo.MemberName which cannot have been trimmed away.")] + [UnconditionalSuppressMessage("Trimming", "IL2070:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The parameter of method does not have matching annotations.", + Justification = "We're reading the member which is already accessed by the source generator.")] +#endif + private static ICustomAttributeProvider? ResolveAttributeProvider(Type? declaringType, JsonPropertyInfo? propertyInfo) + { + if (declaringType is null || propertyInfo is null) + { + return null; + } + + if (propertyInfo.AttributeProvider is { } provider) + { + return provider; + } + + s_memberNameProperty ??= typeof(JsonPropertyInfo).GetProperty("MemberName", BindingFlags.Instance | BindingFlags.NonPublic)!; + var memberName = (string?)s_memberNameProperty.GetValue(propertyInfo); + if (memberName is not null) + { + return declaringType.GetMember(memberName, MemberTypes.Property | MemberTypes.Field, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic).FirstOrDefault(); + } + + return null; + } + + private static PropertyInfo? s_memberNameProperty; + + // Uses reflection to determine any custom converters specified for the element of a nullable type. +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2026", + Justification = "We're resolving private fields of the built-in Nullable converter which cannot have been trimmed away.")] +#endif + private static JsonConverter? ExtractCustomNullableConverter(JsonConverter? 
converter) + { + Debug.Assert(converter is null || IsBuiltInConverter(converter)); + + // There is unfortunately no way in which we can obtain the element converter from a nullable converter without resorting to private reflection + // https://github.com/dotnet/runtime/blob/5fda47434cecc590095e9aef3c4e560b7b7ebb47/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Converters/Value/NullableConverter.cs#L15-L17 + Type? converterType = converter?.GetType(); + if (converterType?.Name == "NullableConverter`1") + { + FieldInfo elementConverterField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_elementConverter"); + return (JsonConverter)elementConverterField!.GetValue(converter)!; + } + + return null; + } + + // Uses reflection to determine schema for enum types + // Adapted from https://github.com/dotnet/runtime/blob/d606c601510c1a1a28cb6ef3550f12db049c0776/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Converters/Value/EnumConverter.cs#L498-L521 +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2026", + Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] +#endif + private static JsonSchema GetEnumConverterSchema(JsonTypeInfo typeInfo, JsonConverter converter, JsonSchemaMapperConfiguration configuration) + { + Debug.Assert(typeInfo.Type.IsEnum && IsBuiltInConverter(converter)); + + if (converter is JsonConverterFactory factory) + { + converter = factory.CreateConverter(typeInfo.Type, typeInfo.Options)!; + } + + Type converterType = converter.GetType(); + FieldInfo converterOptionsField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_converterOptions"); + FieldInfo namingPolicyField = converterType.GetPrivateFieldWithPotentiallyTrimmedMetadata("_namingPolicy"); + + const int EnumConverterOptionsAllowStrings = 1; + var converterOptions = (int)converterOptionsField!.GetValue(converter)!; + if ((converterOptions & 
EnumConverterOptionsAllowStrings) != 0) + { + // This explicitly ignores the integer component in converters configured as AllowNumbers | AllowStrings + // which is the default for JsonStringEnumConverter. This sacrifices some precision in the schema for simplicity. + + if (typeInfo.Type.GetCustomAttribute() is not null) + { + // Do not report enum values in case of flags. + return new() { Type = JsonSchemaType.String }; + } + + var namingPolicy = (JsonNamingPolicy?)namingPolicyField!.GetValue(converter)!; + JsonArray enumValues = new(); + foreach (string name in Enum.GetNames(typeInfo.Type)) + { + // This does not account for custom names specified via the new + // JsonStringEnumMemberNameAttribute introduced in .NET 9. + string effectiveName = namingPolicy?.ConvertName(name) ?? name; + enumValues.Add((JsonNode)effectiveName); + } + + JsonSchema schema = new() { Enum = enumValues }; + if (configuration.IncludeTypeInEnums) + { + schema.Type = JsonSchemaType.String; + } + + return schema; + } + + return new() { Type = JsonSchemaType.Integer }; + } + +#if NETCOREAPP + [RequiresUnreferencedCode("Resolves unreferenced member metadata.")] +#endif + private static FieldInfo GetPrivateFieldWithPotentiallyTrimmedMetadata(this Type type, string fieldName) + { + FieldInfo? field = type.GetField(fieldName, BindingFlags.Instance | BindingFlags.NonPublic); + if (field is null) + { + throw new InvalidOperationException( + $"Could not resolve metadata for field '{fieldName}' in type '{type}'. " + + "If running Native AOT ensure that the 'IlcTrimMetadata' property has been disabled."); + } + + return field; + } + + // Resolves the parameters of the deserialization constructor for a type, if they exist. +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2072:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. 
The return value of the source method does not have matching annotations.", + Justification = "The deserialization constructor should have already been referenced by the source generator and therefore will not have been trimmed.")] +#endif + private static Func? ResolveJsonConstructorParameterMapper(JsonTypeInfo typeInfo) + { + Debug.Assert(typeInfo.Kind is JsonTypeInfoKind.Object); + + if (typeInfo.Properties.Count > 0 && + typeInfo.CreateObject is null && // Ensure that a default constructor isn't being used + typeInfo.Type.TryGetDeserializationConstructor(useDefaultCtorInAnnotatedStructs: true, out ConstructorInfo? ctor)) + { + ParameterInfo[]? parameters = ctor?.GetParameters(); + if (parameters?.Length > 0) + { + Dictionary dict = new(parameters.Length); + foreach (ParameterInfo parameter in parameters) + { + if (parameter.Name is not null) + { + // We don't care about null parameter names or conflicts since they + // would have already been rejected by JsonTypeInfo configuration. + dict[new(parameter.Name, parameter.ParameterType)] = parameter; + } + } + + return prop => dict.TryGetValue(new(prop.Name, prop.PropertyType), out ParameterInfo? parameter) ? parameter : null; + } + } + + return null; + } + + // Parameter to property matching semantics as declared in + // https://github.com/dotnet/runtime/blob/12d96ccfaed98e23c345188ee08f8cfe211c03e7/src/libraries/System.Text.Json/src/System/Text/Json/Serialization/Metadata/JsonTypeInfo.cs#L1007-L1030 + private readonly struct ParameterLookupKey : IEquatable + { + public ParameterLookupKey(string name, Type type) + { + Name = name; + Type = type; + } + + public string Name { get; } + public Type Type { get; } + + public override int GetHashCode() => StringComparer.OrdinalIgnoreCase.GetHashCode(Name); + public bool Equals(ParameterLookupKey other) => Type == other.Type && string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase); + public override bool Equals(object? 
obj) => obj is ParameterLookupKey key && Equals(key); + } + + // Resolves the deserialization constructor for a type using logic copied from + // https://github.com/dotnet/runtime/blob/e12e2fa6cbdd1f4b0c8ad1b1e2d960a480c21703/src/libraries/System.Text.Json/Common/ReflectionExtensions.cs#L227-L286 + private static bool TryGetDeserializationConstructor( +#if NETCOREAPP + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.NonPublicConstructors)] +#endif + this Type type, + bool useDefaultCtorInAnnotatedStructs, + out ConstructorInfo? deserializationCtor) + { + ConstructorInfo? ctorWithAttribute = null; + ConstructorInfo? publicParameterlessCtor = null; + ConstructorInfo? lonePublicCtor = null; + + ConstructorInfo[] constructors = type.GetConstructors(BindingFlags.Public | BindingFlags.Instance); + + if (constructors.Length == 1) + { + lonePublicCtor = constructors[0]; + } + + foreach (ConstructorInfo constructor in constructors) + { + if (HasJsonConstructorAttribute(constructor)) + { + if (ctorWithAttribute != null) + { + deserializationCtor = null; + return false; + } + + ctorWithAttribute = constructor; + } + else if (constructor.GetParameters().Length == 0) + { + publicParameterlessCtor = constructor; + } + } + + // Search for non-public ctors with [JsonConstructor]. + foreach (ConstructorInfo constructor in type.GetConstructors(BindingFlags.NonPublic | BindingFlags.Instance)) + { + if (HasJsonConstructorAttribute(constructor)) + { + if (ctorWithAttribute != null) + { + deserializationCtor = null; + return false; + } + + ctorWithAttribute = constructor; + } + } + + // Structs will use default constructor if attribute isn't used. + if (useDefaultCtorInAnnotatedStructs && type.IsValueType && ctorWithAttribute == null) + { + deserializationCtor = null; + return true; + } + + deserializationCtor = ctorWithAttribute ?? publicParameterlessCtor ?? 
lonePublicCtor; + return true; + + static bool HasJsonConstructorAttribute(ConstructorInfo constructorInfo) => + constructorInfo.GetCustomAttribute() != null; + } + + private static bool IsBuiltInConverter(JsonConverter converter) => + converter.GetType().Assembly == typeof(JsonConverter).Assembly; + + // Resolves the nullable reference type annotations for a property or field, + // additionally addressing a few known bugs of the NullabilityInfo pre .NET 9. + private static NullabilityInfo GetMemberNullability(this NullabilityInfoContext context, MemberInfo memberInfo) + { + Debug.Assert(memberInfo is PropertyInfo or FieldInfo); + return memberInfo is PropertyInfo prop + ? context.Create(prop) + : context.Create((FieldInfo)memberInfo); + } + + private static bool CanBeNull(Type type) => !type.IsValueType || Nullable.GetUnderlyingType(type) is not null; + + private static partial class ThrowHelpers + { + [DoesNotReturn] + public static void ThrowInvalidOperationException_MaxDepthReached() => + throw new InvalidOperationException("The depth of the generated JSON schema exceeds the JsonSerializerOptions.MaxDepth setting."); + } +} +#endif diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv9.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv9.cs new file mode 100644 index 000000000000..82c7fd005cfc --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.STJv9.cs @@ -0,0 +1,190 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. 
+ +#if NET9_0_OR_GREATER || SYSTEM_TEXT_JSON_V9 +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Schema; +using System.Text.Json.Serialization.Metadata; +using System.Threading.Tasks; + +namespace JsonSchemaMapper; + +#if EXPOSE_JSON_SCHEMA_MAPPER +public +#else +internal +#endif + static partial class JsonSchemaMapper +{ + // For System.Text.Json v9 or greater, JsonSchemaMapper is implemented as a shim over the + // built-in JsonSchemaExporter component. Added functionality is implemented by performing + // fix-ups over the generated schema. + + private static partial JsonNode MapRootTypeJsonSchema(JsonTypeInfo typeInfo, JsonSchemaMapperConfiguration configuration) + { + JsonSchemaExporterOptions exporterOptions = new() + { + TreatNullObliviousAsNonNullable = configuration.TreatNullObliviousAsNonNullable, + TransformSchemaNode = (JsonSchemaExporterContext ctx, JsonNode schema) => ApplySchemaTransformations(schema, ctx, configuration), + }; + + return JsonSchemaExporter.GetJsonSchemaAsNode(typeInfo, exporterOptions); + } + + private static partial JsonNode MapMethodParameterJsonSchema( + ParameterInfo parameterInfo, + JsonTypeInfo parameterTypeInfo, + JsonSchemaMapperConfiguration configuration, + NullabilityInfoContext nullabilityContext, + out bool isRequired) + { + Debug.Assert(parameterInfo.Name != null); + + JsonSchemaExporterOptions exporterOptions = new() + { + TreatNullObliviousAsNonNullable = configuration.TreatNullObliviousAsNonNullable, + TransformSchemaNode = (JsonSchemaExporterContext ctx, JsonNode schema) => ApplySchemaTransformations(schema, ctx, configuration, parameterInfo.Name), + }; + + string? 
parameterDescription = null; + isRequired = false; + + ResolveParameterInfo( + parameterInfo, + parameterTypeInfo, + nullabilityContext, + configuration, + out bool hasDefaultValue, + out JsonNode? defaultValue, + out bool isNonNullableType, + ref parameterDescription, + ref isRequired); + + JsonNode parameterSchema = JsonSchemaExporter.GetJsonSchemaAsNode(parameterTypeInfo, exporterOptions); + + if (parameterDescription is not null) + { + ConvertSchemaToObject(ref parameterSchema).Insert(0, JsonSchemaConstants.DescriptionPropertyName, (JsonNode)parameterDescription); + } + + if (hasDefaultValue) + { + ConvertSchemaToObject(ref parameterSchema).Add(JsonSchemaConstants.DefaultPropertyName, defaultValue); + } + + if (isNonNullableType && + parameterSchema is JsonObject parameterSchemaObj && + parameterSchemaObj.TryGetPropertyValue(JsonSchemaConstants.TypePropertyName, out JsonNode? typeSchema) && + typeSchema is JsonArray typeArray) + { + for (int i = 0; i < typeArray.Count; i++) + { + if (typeArray[i]!.GetValue() is "null") + { + typeArray.RemoveAt(i); + break; + } + } + + if (typeArray.Count == 1) + { + parameterSchemaObj[JsonSchemaConstants.TypePropertyName] = (JsonNode)(string)typeArray[0]!; + } + } + + return parameterSchema; + } + + private static JsonNode ApplySchemaTransformations( + JsonNode schema, + JsonSchemaExporterContext ctx, + JsonSchemaMapperConfiguration configuration, + string? 
parameterName = null) + { + JsonSchemaGenerationContext mapperCtx = new( + ctx.TypeInfo, + ctx.TypeInfo.Type, + ctx.PropertyInfo, + (ParameterInfo?)ctx.PropertyInfo?.AssociatedParameter?.AttributeProvider, + ctx.PropertyInfo?.AttributeProvider); + + if (configuration.IncludeTypeInEnums) + { + if (ctx.TypeInfo.Type.IsEnum && + schema is JsonObject enumSchema && + enumSchema.ContainsKey(JsonSchemaConstants.EnumPropertyName)) + { + enumSchema.Insert(0, JsonSchemaConstants.TypePropertyName, (JsonNode)"string"); + } + else if ( + Nullable.GetUnderlyingType(ctx.TypeInfo.Type) is Type { IsEnum: true } && + schema is JsonObject nullableEnumSchema && + nullableEnumSchema.ContainsKey(JsonSchemaConstants.EnumPropertyName)) + { + nullableEnumSchema.Insert(0, JsonSchemaConstants.TypePropertyName, new JsonArray() { (JsonNode)"string", (JsonNode)"null" }); + } + } + + if (configuration.ResolveDescriptionAttributes && mapperCtx.GetAttribute() is DescriptionAttribute attr) + { + ConvertSchemaToObject(ref schema).Insert(0, JsonSchemaConstants.DescriptionPropertyName, (JsonNode)attr.Description); + } + + if (parameterName is null && configuration.IncludeSchemaVersion && ctx.Path.IsEmpty) + { + ConvertSchemaToObject(ref schema).Insert(0, JsonSchemaConstants.SchemaPropertyName, (JsonNode)SchemaVersion); + } + + if (configuration.TransformSchemaNode is { } callback) + { + schema = callback(mapperCtx, schema); + } + + if (parameterName != null && schema is JsonObject refObj && + refObj.TryGetPropertyValue(JsonSchemaConstants.RefPropertyName, out JsonNode? paramName)) + { + // Fix up any $ref URIs to match the path from the root document. + string refUri = paramName!.GetValue(); + Debug.Assert(refUri is "#" || refUri.StartsWith("#/", StringComparison.Ordinal)); + refUri = refUri == "#" + ? 
$"#/{JsonSchemaConstants.PropertiesPropertyName}/{parameterName}" + : $"#/{JsonSchemaConstants.PropertiesPropertyName}/{parameterName}/{refUri[2..]}"; + + refObj[JsonSchemaConstants.RefPropertyName] = (JsonNode)refUri; + } + + return schema; + } + + private static JsonObject ConvertSchemaToObject(ref JsonNode schema) + { + JsonObject jObj; + + switch (schema.GetValueKind()) + { + case JsonValueKind.Object: + return (JsonObject)schema; + + case JsonValueKind.False: + schema = jObj = new() { [JsonSchemaConstants.NotPropertyName] = true }; + return jObj; + + default: + Debug.Assert(schema.GetValueKind() is JsonValueKind.True, "invalid schema type."); + schema = jObj = new JsonObject(); + return jObj; + } + } +} +#endif diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.cs index 55e7763b786f..c19a56330e4c 100644 --- a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.cs +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapper.cs @@ -1,11 +1,13 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. + using System; -using System.Collections.Generic; using System.ComponentModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; -using System.Globalization; using System.Linq; using System.Reflection; using System.Text.Json; @@ -19,12 +21,12 @@ namespace JsonSchemaMapper; /// Maps .NET types to JSON schema objects using contract metadata from instances. /// #if EXPOSE_JSON_SCHEMA_MAPPER - public +public #else [ExcludeFromCodeCoverage] internal #endif -static partial class JsonSchemaMapper + static partial class JsonSchemaMapper { /// /// The JSON schema draft version used by the generated schemas. 
@@ -37,10 +39,10 @@ static partial class JsonSchemaMapper /// The options instance from which to resolve the contract metadata. /// The root type for which to generate the JSON schema. /// The configuration object controlling the schema generation. - /// A new instance defining the JSON schema for . + /// A new instance defining the JSON schema for . /// One of the specified parameters is . /// The parameter contains unsupported configuration. - public static JsonObject GetJsonSchema(this JsonSerializerOptions options, Type type, JsonSchemaMapperConfiguration? configuration = null) + public static JsonNode GetJsonSchema(this JsonSerializerOptions options, Type type, JsonSchemaMapperConfiguration? configuration = null) { if (options is null) { @@ -54,10 +56,8 @@ public static JsonObject GetJsonSchema(this JsonSerializerOptions options, Type ValidateOptions(options); configuration ??= JsonSchemaMapperConfiguration.Default; - JsonTypeInfo typeInfo = options.GetTypeInfo(type); - var state = new GenerationState(configuration); - return MapJsonSchemaCore(typeInfo, ref state); + return MapRootTypeJsonSchema(typeInfo, configuration); } /// @@ -66,10 +66,10 @@ public static JsonObject GetJsonSchema(this JsonSerializerOptions options, Type /// The options instance from which to resolve the contract metadata. /// The method from whose parameters to generate the JSON schema. /// The configuration object controlling the schema generation. - /// A new instance defining the JSON schema for . + /// A new instance defining the JSON schema for . /// One of the specified parameters is . /// The parameter contains unsupported configuration. - public static JsonObject GetJsonSchema(this JsonSerializerOptions options, MethodBase method, JsonSchemaMapperConfiguration? configuration = null) + public static JsonNode GetJsonSchema(this JsonSerializerOptions options, MethodBase method, JsonSchemaMapperConfiguration? 
configuration = null) { if (options is null) { @@ -84,52 +84,60 @@ public static JsonObject GetJsonSchema(this JsonSerializerOptions options, Metho ValidateOptions(options); configuration ??= JsonSchemaMapperConfiguration.Default; - var state = new GenerationState(configuration); - string title = method.Name; - string? description = configuration.ResolveDescriptionAttributes - ? method.GetCustomAttribute()?.Description - : null; + JsonObject schema = new(); + + if (configuration.IncludeSchemaVersion) + { + schema.Add(JsonSchemaConstants.SchemaPropertyName, SchemaVersion); + } + + schema.Add(JsonSchemaConstants.TitlePropertyName, method.Name); - JsonSchemaType type = JsonSchemaType.Object; + if (configuration.ResolveDescriptionAttributes && + method.GetCustomAttribute() is DescriptionAttribute attr) + { + schema.Add(JsonSchemaConstants.DescriptionPropertyName, attr.Description); + } + + schema.Add(JsonSchemaConstants.TypePropertyName, "object"); + + NullabilityInfoContext nullabilityInfoContext = new(); JsonObject? paramSchemas = null; JsonArray? requiredParams = null; - foreach (ParameterInfo parameter in method.GetParameters()) + foreach (ParameterInfo parameterInfo in method.GetParameters()) { - if (parameter.Name is null) + if (parameterInfo.Name is null) { ThrowHelpers.ThrowInvalidOperationException_TrimmedMethodParameters(method); } - JsonTypeInfo parameterInfo = options.GetTypeInfo(parameter.ParameterType); - bool isNullableReferenceType = false; - string? parameterDescription = null; - bool hasDefaultValue = false; - JsonNode? 
defaultValue = null; - bool isRequired = false; - - ResolveParameterInfo(parameter, parameterInfo, ref state, ref parameterDescription, ref hasDefaultValue, ref defaultValue, ref isNullableReferenceType, ref isRequired); - - state.Push(parameter.Name); - JsonObject paramSchema = MapJsonSchemaCore( + JsonTypeInfo parameterTypeInfo = options.GetTypeInfo(parameterInfo.ParameterType); + JsonNode parameterSchema = MapMethodParameterJsonSchema( parameterInfo, - ref state, - title: null, - parameterDescription, - isNullableReferenceType, - hasDefaultValue: hasDefaultValue, - defaultValue: defaultValue); - - state.Pop(); + parameterTypeInfo, + configuration, + nullabilityInfoContext, + out bool isRequired); - (paramSchemas ??= []).Add(parameter.Name, paramSchema); + (paramSchemas ??= new()).Add(parameterInfo.Name, parameterSchema); if (isRequired) { - (requiredParams ??= []).Add((JsonNode)parameter.Name); + (requiredParams ??= new()).Add((JsonNode)parameterInfo.Name); } } - return CreateSchemaDocument(ref state, title: title, description: description, schemaType: type, properties: paramSchemas, requiredProperties: requiredParams); + if (paramSchemas != null) + { + schema.Add(JsonSchemaConstants.PropertiesPropertyName, paramSchemas); + } + + if (requiredParams != null) + { + schema.Add(JsonSchemaConstants.RequiredPropertyName, requiredParams); + } + + return schema; } /// @@ -137,10 +145,10 @@ public static JsonObject GetJsonSchema(this JsonSerializerOptions options, Metho /// /// The contract metadata for which to generate the schema. /// The configuration object controlling the schema generation. - /// A new instance defining the JSON schema for . + /// A new instance defining the JSON schema for . /// One of the specified parameters is . /// The parameter contains unsupported configuration. - public static JsonObject GetJsonSchema(this JsonTypeInfo typeInfo, JsonSchemaMapperConfiguration? 
configuration = null) + public static JsonNode GetJsonSchema(this JsonTypeInfo typeInfo, JsonSchemaMapperConfiguration? configuration = null) { if (typeInfo is null) { @@ -149,9 +157,8 @@ public static JsonObject GetJsonSchema(this JsonTypeInfo typeInfo, JsonSchemaMap ValidateOptions(typeInfo.Options); typeInfo.MakeReadOnly(); - - var state = new GenerationState(configuration ?? JsonSchemaMapperConfiguration.Default); - return MapJsonSchemaCore(typeInfo, ref state); + configuration ??= JsonSchemaMapperConfiguration.Default; + return MapRootTypeJsonSchema(typeInfo, configuration); } /// @@ -162,397 +169,51 @@ public static JsonObject GetJsonSchema(this JsonTypeInfo typeInfo, JsonSchemaMap /// The JSON node rendered as a JSON string. public static string ToJsonString(this JsonNode? node, bool writeIndented = false) { - return node is null - ? "null" - : node.ToJsonString(writeIndented ? new JsonSerializerOptions { WriteIndented = true } : null); + return node is null ? "null" : node.ToJsonString(writeIndented ? s_writeIndentedOptions : null); } - private static JsonObject MapJsonSchemaCore( - JsonTypeInfo typeInfo, - ref GenerationState state, - string? title = null, - string? description = null, - bool isNullableReferenceType = false, - bool isNullableOfTElement = false, - JsonConverter? customConverter = null, - bool hasDefaultValue = false, - JsonNode? defaultValue = null, - JsonNumberHandling? customNumberHandling = null, - KeyValuePair? derivedTypeDiscriminator = null, - Type? parentNullableOfT = null) - { - Debug.Assert(typeInfo.IsReadOnly); - - Type type = typeInfo.Type; - JsonConverter effectiveConverter = customConverter ?? typeInfo.Converter; - JsonNumberHandling? effectiveNumberHandling = customNumberHandling ?? 
typeInfo.NumberHandling; - bool emitsTypeDiscriminator = derivedTypeDiscriminator?.Value is not null; - bool isCacheable = !emitsTypeDiscriminator && description is null && !hasDefaultValue && !isNullableOfTElement; + private static readonly JsonSerializerOptions s_writeIndentedOptions = new() { WriteIndented = true }; - if (!IsBuiltInConverter(effectiveConverter)) - { - return []; // We can't make any schema determinations if a custom converter is used - } + private static partial JsonNode MapRootTypeJsonSchema(JsonTypeInfo typeInfo, JsonSchemaMapperConfiguration configuration); - if (isCacheable && state.TryGetGeneratedSchemaPath(type, parentNullableOfT, customConverter, isNullableReferenceType, customNumberHandling, out string? typePath)) - { - // Schema for type has already been generated, return a reference to it. - // For derived types using discriminators, the schema is generated inline. - return new JsonObject { [RefPropertyName] = typePath }; - } - - if (state.Configuration.ResolveDescriptionAttributes) - { - description ??= type.GetCustomAttribute()?.Description; - } - - if (Nullable.GetUnderlyingType(type) is Type nullableElementType) - { - // Nullable types must be handled separately - JsonTypeInfo nullableElementTypeInfo = typeInfo.Options.GetTypeInfo(nullableElementType); - customConverter = ExtractCustomNullableConverter(customConverter); - - return MapJsonSchemaCore( - nullableElementTypeInfo, - ref state, - title, - description, - hasDefaultValue: hasDefaultValue, - defaultValue: defaultValue, - customNumberHandling: customNumberHandling, - customConverter: customConverter, - parentNullableOfT: type, - isNullableOfTElement: true); - } - - if (isCacheable && typeInfo.Kind != JsonTypeInfoKind.None) - { - // For complex types such objects, arrays, and dictionaries register the current path - // so that it can be referenced by later occurrences in the type graph. 
Do not register - // types in a polymorphic hierarchy using discriminators as they need to be inlined. - state.RegisterTypePath(type, parentNullableOfT, customConverter, isNullableReferenceType, customNumberHandling); - } - - JsonSchemaType schemaType = JsonSchemaType.Any; - string? format = null; - string? pattern = null; - JsonObject? properties = null; - JsonArray? requiredProperties = null; - JsonObject? arrayItems = null; - JsonNode? additionalProperties = null; - JsonArray? enumValues = null; - JsonArray? anyOfTypes = null; - - if (derivedTypeDiscriminator is null && typeInfo.PolymorphismOptions is { DerivedTypes.Count: > 0 } polyOptions) - { - // This is the base type of a polymorphic type hierarchy. The schema for this type - // will include an "anyOf" property with the schemas for all derived types. - - string typeDiscriminatorKey = polyOptions.TypeDiscriminatorPropertyName; - List derivedTypes = polyOptions.DerivedTypes.ToList(); - - if (!type.IsAbstract && derivedTypes.Any(derived => derived.DerivedType == type)) - { - // For non-abstract base types that haven't been explicitly configured, - // add a trivial schema to the derived types since we should support it. - derivedTypes.Add(new JsonDerivedType(type)); - } - - state.Push(AnyOfPropertyName); - anyOfTypes = []; - - int i = 0; - foreach (JsonDerivedType derivedType in derivedTypes) - { - Debug.Assert(derivedType.TypeDiscriminator is null or int or string); - JsonNode? 
typeDiscriminatorPropertySchema = derivedType.TypeDiscriminator switch - { - string stringId => new JsonObject { [ConstPropertyName] = (JsonNode)stringId }, - int intId => new JsonObject { [ConstPropertyName] = (JsonNode)intId }, - _ => null, - }; - - JsonTypeInfo derivedTypeInfo = typeInfo.Options.GetTypeInfo(derivedType.DerivedType); - - state.Push(i++.ToString(CultureInfo.InvariantCulture)); - JsonObject derivedSchema = MapJsonSchemaCore( - derivedTypeInfo, - ref state, - derivedTypeDiscriminator: new(typeDiscriminatorKey, typeDiscriminatorPropertySchema)); - state.Pop(); - - anyOfTypes.Add((JsonNode)derivedSchema); - } - - state.Pop(); - goto ConstructSchemaDocument; - } - - switch (typeInfo.Kind) - { - case JsonTypeInfoKind.None: - if (s_simpleTypeInfo.TryGetValue(type, out SimpleTypeJsonSchema simpleTypeInfo)) - { - schemaType = simpleTypeInfo.SchemaType; - format = simpleTypeInfo.Format; - pattern = simpleTypeInfo.Pattern; - - if (effectiveNumberHandling is JsonNumberHandling numberHandling && - schemaType is JsonSchemaType.Integer or JsonSchemaType.Number) - { - if ((numberHandling & (JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)) != 0) - { - schemaType |= JsonSchemaType.String; - } - else if (numberHandling is JsonNumberHandling.AllowNamedFloatingPointLiterals) - { - anyOfTypes = - [ - (JsonNode)new JsonObject { [TypePropertyName] = MapSchemaType(schemaType) }, - (JsonNode)new JsonObject - { - [EnumPropertyName] = new JsonArray { (JsonNode)"NaN", (JsonNode)"Infinity", (JsonNode)"-Infinity" }, - }, - ]; - - schemaType = JsonSchemaType.Any; // reset the parent setting - } - } - } - else if (type.IsEnum) - { - if (TryGetStringEnumConverterValues(typeInfo, effectiveConverter, out enumValues)) - { - schemaType = JsonSchemaType.String; - - if (enumValues != null && isNullableOfTElement) - { - // We're generating the schema for a nullable - // enum type. Append null to the "enum" array. 
- enumValues.Add(null); - } - } - else - { - schemaType = JsonSchemaType.Integer; - } - } - - break; - - case JsonTypeInfoKind.Object: - schemaType = JsonSchemaType.Object; - - if (typeInfo.UnmappedMemberHandling is JsonUnmappedMemberHandling.Disallow) - { - // Disallow unspecified properties. - additionalProperties = false; - } - - if (emitsTypeDiscriminator) - { - Debug.Assert(derivedTypeDiscriminator?.Value is not null); - (properties ??= []).Add(derivedTypeDiscriminator!.Value); - (requiredProperties ??= []).Add((JsonNode)derivedTypeDiscriminator.Value.Key); - } - - Func parameterInfoMapper = ResolveJsonConstructorParameterMapper(typeInfo); - - state.Push(PropertiesPropertyName); - foreach (JsonPropertyInfo property in typeInfo.Properties) - { - if (property is { Get: null, Set: null }) - { - continue; // Skip [JsonIgnore] property - } - - if (property.IsExtensionData) - { - continue; // Extension data properties don't impact the schema. - } - - JsonNumberHandling? propertyNumberHandling = property.NumberHandling ?? effectiveNumberHandling; - JsonTypeInfo propertyTypeInfo = typeInfo.Options.GetTypeInfo(property.PropertyType); - - // Only resolve nullability metadata for reference types. - NullabilityInfoContext? nullabilityCtx = !property.PropertyType.IsValueType ? state.NullabilityInfoContext : null; - - // Only resolve the attribute provider if needed. - ICustomAttributeProvider? attributeProvider = state.Configuration.ResolveDescriptionAttributes || nullabilityCtx is not null - ? ResolveAttributeProvider(typeInfo, property) - : null; - - // Resolve property-level description attributes. - string? propertyDescription = state.Configuration.ResolveDescriptionAttributes - ? attributeProvider?.GetCustomAttributes(inherit: true).OfType().FirstOrDefault()?.Description - : null; - - // Declare the property as nullable if either getter or setter are nullable. 
- bool isPropertyNullableReferenceType = nullabilityCtx is not null && attributeProvider is MemberInfo memberInfo - ? nullabilityCtx.GetMemberNullability(memberInfo) is { WriteState: NullabilityState.Nullable } or { ReadState: NullabilityState.Nullable } - : false; - - bool isRequired = property.IsRequired; - bool propertyHasDefaultValue = false; - JsonNode? propertyDefaultValue = null; - - if (parameterInfoMapper(property) is ParameterInfo ctorParam) - { - ResolveParameterInfo( - ctorParam, - propertyTypeInfo, - ref state, - ref propertyDescription, - ref propertyHasDefaultValue, - ref propertyDefaultValue, - ref isPropertyNullableReferenceType, - ref isRequired); - } - - state.Push(property.Name); - JsonObject propertySchema = MapJsonSchemaCore( - typeInfo: propertyTypeInfo, - state: ref state, - title: null, - description: propertyDescription, - isNullableReferenceType: isPropertyNullableReferenceType, - customConverter: property.CustomConverter, - hasDefaultValue: propertyHasDefaultValue, - defaultValue: propertyDefaultValue, - customNumberHandling: propertyNumberHandling); - - state.Pop(); - - (properties ??= []).Add(property.Name, propertySchema); - - if (isRequired) - { - (requiredProperties ??= []).Add((JsonNode)property.Name); - } - } - - state.Pop(); - break; - - case JsonTypeInfoKind.Enumerable: - Type elementType = GetElementType(typeInfo); - JsonTypeInfo elementTypeInfo = typeInfo.Options.GetTypeInfo(elementType); - - if (emitsTypeDiscriminator) - { - Debug.Assert(derivedTypeDiscriminator is not null); - - // Polymorphic enumerable types are represented using a wrapping object: - // { "$type" : "discriminator", "$values" : [element1, element2, ...] } - // Which corresponds to the schema - // { "properties" : { "$type" : { "const" : "discriminator" }, "$values" : { "type" : "array", "items" : { ... 
} } } } - - schemaType = JsonSchemaType.Object; - (properties ??= []).Add(derivedTypeDiscriminator!.Value); - (requiredProperties ??= []).Add((JsonNode)derivedTypeDiscriminator.Value.Key); - - state.Push(PropertiesPropertyName); - state.Push(StjValuesMetadataProperty); - state.Push(ItemsPropertyName); - JsonObject elementSchema = MapJsonSchemaCore(elementTypeInfo, ref state); - state.Pop(); - state.Pop(); - state.Pop(); - - properties.Add( - StjValuesMetadataProperty, - new JsonObject - { - [TypePropertyName] = MapSchemaType(JsonSchemaType.Array), - [ItemsPropertyName] = elementSchema, - }); - } - else - { - schemaType = JsonSchemaType.Array; - - state.Push(ItemsPropertyName); - arrayItems = MapJsonSchemaCore(elementTypeInfo, ref state); - state.Pop(); - } - - break; - - case JsonTypeInfoKind.Dictionary: - schemaType = JsonSchemaType.Object; - Type valueType = GetElementType(typeInfo); - JsonTypeInfo valueTypeInfo = typeInfo.Options.GetTypeInfo(valueType); - - if (emitsTypeDiscriminator) - { - Debug.Assert(derivedTypeDiscriminator?.Value is not null); - (properties ??= []).Add(derivedTypeDiscriminator!.Value); - (requiredProperties ??= []).Add((JsonNode)derivedTypeDiscriminator.Value.Key); - } - - state.Push(AdditionalPropertiesPropertyName); - additionalProperties = MapJsonSchemaCore(valueTypeInfo, ref state); - state.Pop(); - break; - - default: - Debug.Fail("Unreachable code"); - break; - } + private static partial JsonNode MapMethodParameterJsonSchema( + ParameterInfo parameterInfo, + JsonTypeInfo parameterTypeInfo, + JsonSchemaMapperConfiguration configuration, + NullabilityInfoContext nullabilityContext, + out bool isRequired); - if (schemaType != JsonSchemaType.Any && - (type.IsValueType - ? 
parentNullableOfT is not null - : (isNullableReferenceType || state.Configuration.ReferenceTypeNullability is ReferenceTypeNullability.AlwaysNullable))) + private static void ValidateOptions(JsonSerializerOptions options) + { + if (options.ReferenceHandler == ReferenceHandler.Preserve) { - // Append "null" to the type array in the following cases: - // 1. The type is a nullable value type or - // 2. The type has been inferred to be a nullable reference type annotation or - // 3. The schema generator has been configured to always emit null for reference types (default STJ semantics). - schemaType |= JsonSchemaType.Null; + ThrowHelpers.ThrowNotSupportedException_ReferenceHandlerPreserveNotSupported(); } -ConstructSchemaDocument: - return CreateSchemaDocument( - ref state, - title, - description, - schemaType, - format, - pattern, - properties, - requiredProperties, - arrayItems, - additionalProperties, - enumValues, - anyOfTypes, - hasDefaultValue, - defaultValue); + options.MakeReadOnly(); } private static void ResolveParameterInfo( ParameterInfo parameter, JsonTypeInfo parameterTypeInfo, - ref GenerationState state, + NullabilityInfoContext nullabilityInfoContext, + JsonSchemaMapperConfiguration configuration, + out bool hasDefaultValue, + out JsonNode? defaultValue, + out bool isNonNullable, ref string? description, - ref bool hasDefaultValue, - ref JsonNode? defaultValue, - ref bool isNullableReferenceType, ref bool isRequired) { Debug.Assert(parameterTypeInfo.Type == parameter.ParameterType); - if (state.Configuration.ResolveDescriptionAttributes) + if (configuration.ResolveDescriptionAttributes) { // Resolve parameter-level description attributes. description ??= parameter.GetCustomAttribute()?.Description; } - if (!isNullableReferenceType && state.NullabilityInfoContext is { } ctx) - { - // Consult the nullability annotation of the constructor parameter if available. 
- isNullableReferenceType = ctx.GetParameterNullability(parameter) is NullabilityState.Nullable; - } + // Incorporate the nullability information from the parameter. + isNonNullable = nullabilityInfoContext.GetParameterNullability(parameter) is NullabilityState.NotNull; if (parameter.HasDefaultValue) { @@ -561,337 +222,194 @@ private static void ResolveParameterInfo( defaultValue = JsonSerializer.SerializeToNode(defaultVal, parameterTypeInfo); hasDefaultValue = true; } - else if (state.Configuration.RequireConstructorParameters) + else { // Parameter is not optional, mark as required. isRequired = true; + defaultValue = null; + hasDefaultValue = false; } } - private ref struct GenerationState + private static NullabilityState GetParameterNullability(this NullabilityInfoContext context, ParameterInfo parameterInfo) { - private readonly JsonSchemaMapperConfiguration _configuration; - private readonly NullabilityInfoContext? _nullabilityInfoContext; - private readonly Dictionary<(Type, JsonConverter? CustomConverter, bool IsNullableReferenceType, JsonNumberHandling? CustomNumberHandling), string>? _generatedTypePaths; - private readonly List? _currentPath; - private int _currentDepth; - - public GenerationState(JsonSchemaMapperConfiguration configuration) - { - _configuration = configuration; - _nullabilityInfoContext = configuration.ReferenceTypeNullability is ReferenceTypeNullability.Annotated ? new() : null; - _generatedTypePaths = configuration.AllowSchemaReferences ? new() : null; - _currentPath = configuration.AllowSchemaReferences ? new() : null; - _currentDepth = 0; - } - - public readonly JsonSchemaMapperConfiguration Configuration => _configuration; - public readonly NullabilityInfoContext? 
NullabilityInfoContext => _nullabilityInfoContext; - public readonly int CurrentDepth => _currentDepth; - - public void Push(string nodeId) +#if !NET9_0_OR_GREATER + // Workaround for https://github.com/dotnet/runtime/issues/92487 + if (GetGenericParameterDefinition(parameterInfo) is { ParameterType: { IsGenericParameter: true } typeParam }) { - if (_currentDepth == Configuration.MaxDepth) + // Step 1. Look for nullable annotations on the type parameter. + if (GetNullableFlags(typeParam) is byte[] flags) { - ThrowHelpers.ThrowInvalidOperationException_MaxDepthReached(); + return TranslateByte(flags[0]); } - _currentDepth++; + // Step 2. Look for nullable annotations on the generic method declaration. + if (typeParam.DeclaringMethod != null && GetNullableContextFlag(typeParam.DeclaringMethod) is byte flag) + { + return TranslateByte(flag); + } - if (Configuration.AllowSchemaReferences) + // Step 3. Look for nullable annotations on the generic method declaration. + if (GetNullableContextFlag(typeParam.DeclaringType!) is byte flag2) { - Debug.Assert(_currentPath is not null); - _currentPath!.Add(nodeId); + return TranslateByte(flag2); } - } - public void Pop() - { - Debug.Assert(_currentDepth > 0); - _currentDepth--; + // Default to nullable. + return NullabilityState.Nullable; - if (Configuration.AllowSchemaReferences) +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", + Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] +#endif + static byte[]? GetNullableFlags(MemberInfo member) { - Debug.Assert(_currentPath is not null); - _currentPath!.RemoveAt(_currentPath.Count - 1); + Attribute? 
attr = member.GetCustomAttributes().FirstOrDefault(attr => + { + Type attrType = attr.GetType(); + return attrType.Namespace == "System.Runtime.CompilerServices" && attrType.Name == "NullableAttribute"; + }); + + return (byte[])attr?.GetType().GetField("NullableFlags")?.GetValue(attr)!; } - } - /// - /// Associates the specified type configuration with the current path in the schema. - /// - public readonly void RegisterTypePath(Type type, Type? parentNullableOfT, JsonConverter? customConverter, bool isNullableReferenceType, JsonNumberHandling? customNumberHandling) - { - if (Configuration.AllowSchemaReferences) +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", + Justification = "We're resolving private fields of the built-in enum converter which cannot have been trimmed away.")] +#endif + static byte? GetNullableContextFlag(MemberInfo member) { - Debug.Assert(_currentPath is not null); - Debug.Assert(_generatedTypePaths is not null); + Attribute? attr = member.GetCustomAttributes().FirstOrDefault(attr => + { + Type attrType = attr.GetType(); + return attrType.Namespace == "System.Runtime.CompilerServices" && attrType.Name == "NullableContextAttribute"; + }); - string pointer = _currentDepth == 0 ? "#" : "#/" + string.Join("/", _currentPath); - _generatedTypePaths!.Add((parentNullableOfT ?? type, customConverter, isNullableReferenceType, customNumberHandling), pointer); + return (byte?)attr?.GetType().GetField("Flag")?.GetValue(attr)!; } - } - /// - /// Looks up the schema path for the specified type configuration. - /// - public readonly bool TryGetGeneratedSchemaPath(Type type, Type? parentNullableOfT, JsonConverter? customConverter, bool isNullableReferenceType, JsonNumberHandling? customNumberHandling, [NotNullWhen(true)] out string? 
value) - { - if (Configuration.AllowSchemaReferences) + static NullabilityState TranslateByte(byte b) { - Debug.Assert(_generatedTypePaths is not null); - return _generatedTypePaths!.TryGetValue((parentNullableOfT ?? type, customConverter, isNullableReferenceType, customNumberHandling), out value); + return b switch + { + 1 => NullabilityState.NotNull, + 2 => NullabilityState.Nullable, + _ => NullabilityState.Unknown + }; } - - value = null; - return false; - } - } - - private static JsonObject CreateSchemaDocument( - ref GenerationState state, - string? title = null, - string? description = null, - JsonSchemaType schemaType = JsonSchemaType.Any, - string? format = null, - string? pattern = null, - JsonObject? properties = null, - JsonArray? requiredProperties = null, - JsonObject? arrayItems = null, - JsonNode? additionalProperties = null, - JsonArray? enumValues = null, - JsonArray? anyOfSchema = null, - bool hasDefaultValue = false, - JsonNode? defaultValue = null) - { - var schema = new JsonObject(); - - if (state.CurrentDepth == 0 && state.Configuration.IncludeSchemaVersion) - { - schema.Add(SchemaPropertyName, SchemaVersion); - } - - if (title is not null) - { - schema.Add(TitlePropertyName, title); } - if (description is not null) + static ParameterInfo GetGenericParameterDefinition(ParameterInfo parameter) { - schema.Add(DescriptionPropertyName, description); - } - - if (MapSchemaType(schemaType) is JsonNode type) - { - schema.Add(TypePropertyName, type); - } + if (parameter.Member is { DeclaringType.IsConstructedGenericType: true } + or MethodInfo { IsGenericMethod: true, IsGenericMethodDefinition: false }) + { + var genericMethod = (MethodBase)GetGenericMemberDefinition(parameter.Member); + return genericMethod.GetParameters()[parameter.Position]; + } - if (format is not null) - { - schema.Add(FormatPropertyName, format); + return parameter; } - if (pattern is not null) +#if NETCOREAPP + [UnconditionalSuppressMessage("Trimming", "IL2075:'this' argument 
does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", + Justification = "Looking up the generic member definition of the provided member.")] +#endif + static MemberInfo GetGenericMemberDefinition(MemberInfo member) { - schema.Add(PatternPropertyName, pattern); - } + if (member is Type type) + { + return type.IsConstructedGenericType ? type.GetGenericTypeDefinition() : type; + } - if (properties is not null) - { - schema.Add(PropertiesPropertyName, properties); - } + if (member.DeclaringType!.IsConstructedGenericType) + { + const BindingFlags AllMemberFlags = + BindingFlags.Static | BindingFlags.Instance | + BindingFlags.Public | BindingFlags.NonPublic; - if (requiredProperties is not null) - { - schema.Add(RequiredPropertyName, requiredProperties); - } + return member.DeclaringType.GetGenericTypeDefinition() + .GetMember(member.Name, AllMemberFlags) + .First(m => m.MetadataToken == member.MetadataToken); + } - if (arrayItems is not null) - { - schema.Add(ItemsPropertyName, arrayItems); - } + if (member is MethodInfo { IsGenericMethod: true, IsGenericMethodDefinition: false } method) + { + return method.GetGenericMethodDefinition(); + } - if (additionalProperties is not null) - { - schema.Add(AdditionalPropertiesPropertyName, additionalProperties); + return member; } +#endif + return context.Create(parameterInfo).WriteState; + } - if (enumValues is not null) - { - schema.Add(EnumPropertyName, enumValues); - } + // Taken from https://github.com/dotnet/runtime/blob/903bc019427ca07080530751151ea636168ad334/src/libraries/System.Text.Json/Common/ReflectionExtensions.cs#L288-L317 + private static object? GetNormalizedDefaultValue(this ParameterInfo parameterInfo) + { + Type parameterType = parameterInfo.ParameterType; + object? 
defaultValue = parameterInfo.DefaultValue; - if (anyOfSchema is not null) + if (defaultValue is null) { - schema.Add(AnyOfPropertyName, anyOfSchema); + return null; } - if (hasDefaultValue) + // DBNull.Value is sometimes used as the default value (returned by reflection) of nullable params in place of null. + if (defaultValue == DBNull.Value && parameterType != typeof(DBNull)) { - schema.Add(DefaultPropertyName, defaultValue); + return null; } - return schema; - } - - [Flags] - private enum JsonSchemaType - { - Any = 0, // No type declared on the schema - Null = 1, - Boolean = 2, - Integer = 4, - Number = 8, - String = 16, - Array = 32, - Object = 64, - } - - private static readonly JsonSchemaType[] s_schemaValues = - [ - // NB the order of these values influences order of types in the rendered schema - JsonSchemaType.String, - JsonSchemaType.Integer, - JsonSchemaType.Number, - JsonSchemaType.Boolean, - JsonSchemaType.Array, - JsonSchemaType.Object, - JsonSchemaType.Null, - ]; - - private static JsonNode? MapSchemaType(JsonSchemaType schemaType) - { - return schemaType switch - { - JsonSchemaType.Any => null, - JsonSchemaType.Null => "null", - JsonSchemaType.Boolean => "boolean", - JsonSchemaType.Integer => "integer", - JsonSchemaType.Number => "number", - JsonSchemaType.String => "string", - JsonSchemaType.Array => "array", - JsonSchemaType.Object => "object", - _ => MapCompositeSchemaType(schemaType), - }; - - static JsonArray MapCompositeSchemaType(JsonSchemaType schemaType) + // Default values of enums or nullable enums are represented using the underlying type and need to be cast explicitly + // cf. 
https://github.com/dotnet/runtime/issues/68647 + if (parameterType.IsEnum) { - var array = new JsonArray(); - foreach (JsonSchemaType type in s_schemaValues) - { - if ((schemaType & type) != 0) - { - array.Add(MapSchemaType(type)); - } - } - - return array; + return Enum.ToObject(parameterType, defaultValue); } - } - private const string SchemaPropertyName = "$schema"; - private const string RefPropertyName = "$ref"; - private const string TitlePropertyName = "title"; - private const string DescriptionPropertyName = "description"; - private const string TypePropertyName = "type"; - private const string FormatPropertyName = "format"; - private const string PatternPropertyName = "pattern"; - private const string PropertiesPropertyName = "properties"; - private const string RequiredPropertyName = "required"; - private const string ItemsPropertyName = "items"; - private const string AdditionalPropertiesPropertyName = "additionalProperties"; - private const string EnumPropertyName = "enum"; - private const string AnyOfPropertyName = "anyOf"; - private const string ConstPropertyName = "const"; - private const string DefaultPropertyName = "default"; - private const string StjValuesMetadataProperty = "$values"; - - private readonly struct SimpleTypeJsonSchema - { - public SimpleTypeJsonSchema(JsonSchemaType schemaType, string? format = null, string? pattern = null) + if (Nullable.GetUnderlyingType(parameterType) is Type underlyingType && underlyingType.IsEnum) { - SchemaType = schemaType; - Format = format; - Pattern = pattern; + return Enum.ToObject(underlyingType, defaultValue); } - public JsonSchemaType SchemaType { get; } - public string? Format { get; } - public string? 
Pattern { get; } + return defaultValue; } - private static readonly Dictionary s_simpleTypeInfo = new() + private static class JsonSchemaConstants { - [typeof(object)] = new(JsonSchemaType.Any), - [typeof(bool)] = new(JsonSchemaType.Boolean), - [typeof(byte)] = new(JsonSchemaType.Integer), - [typeof(ushort)] = new(JsonSchemaType.Integer), - [typeof(uint)] = new(JsonSchemaType.Integer), - [typeof(ulong)] = new(JsonSchemaType.Integer), - [typeof(sbyte)] = new(JsonSchemaType.Integer), - [typeof(short)] = new(JsonSchemaType.Integer), - [typeof(int)] = new(JsonSchemaType.Integer), - [typeof(long)] = new(JsonSchemaType.Integer), - [typeof(float)] = new(JsonSchemaType.Number), - [typeof(double)] = new(JsonSchemaType.Number), - [typeof(decimal)] = new(JsonSchemaType.Number), -#if NET6_0_OR_GREATER - [typeof(Half)] = new(JsonSchemaType.Number), -#endif -#if NET7_0_OR_GREATER - [typeof(UInt128)] = new(JsonSchemaType.Integer), - [typeof(Int128)] = new(JsonSchemaType.Integer), -#endif - [typeof(char)] = new(JsonSchemaType.String), - [typeof(string)] = new(JsonSchemaType.String), - [typeof(byte[])] = new(JsonSchemaType.String), - [typeof(Memory)] = new(JsonSchemaType.String), - [typeof(ReadOnlyMemory)] = new(JsonSchemaType.String), - [typeof(DateTime)] = new(JsonSchemaType.String, format: "date-time"), - [typeof(DateTimeOffset)] = new(JsonSchemaType.String, format: "date-time"), - - // TimeSpan is represented as a string in the format "[-][d.]hh:mm:ss[.fffffff]". 
- [typeof(TimeSpan)] = new(JsonSchemaType.String, pattern: @"^-?(\d+\.)?\d{2}:\d{2}:\d{2}(\.\d{1,7})?$"), -#if NET6_0_OR_GREATER - [typeof(DateOnly)] = new(JsonSchemaType.String, format: "date"), - [typeof(TimeOnly)] = new(JsonSchemaType.String, format: "time"), -#endif - [typeof(Guid)] = new(JsonSchemaType.String, format: "uuid"), - [typeof(Uri)] = new(JsonSchemaType.String, format: "uri"), - [typeof(Version)] = new(JsonSchemaType.String), - [typeof(JsonDocument)] = new(JsonSchemaType.Any), - [typeof(JsonElement)] = new(JsonSchemaType.Any), - [typeof(JsonNode)] = new(JsonSchemaType.Any), - [typeof(JsonValue)] = new(JsonSchemaType.Any), - [typeof(JsonObject)] = new(JsonSchemaType.Object), - [typeof(JsonArray)] = new(JsonSchemaType.Array), - }; - - private static void ValidateOptions(JsonSerializerOptions options) - { - if (options.ReferenceHandler == ReferenceHandler.Preserve) - { - ThrowHelpers.ThrowNotSupportedException_ReferenceHandlerPreserveNotSupported(); - } - - options.MakeReadOnly(); + public const string SchemaPropertyName = "$schema"; + public const string RefPropertyName = "$ref"; + public const string CommentPropertyName = "$comment"; + public const string TitlePropertyName = "title"; + public const string DescriptionPropertyName = "description"; + public const string TypePropertyName = "type"; + public const string FormatPropertyName = "format"; + public const string PatternPropertyName = "pattern"; + public const string PropertiesPropertyName = "properties"; + public const string RequiredPropertyName = "required"; + public const string ItemsPropertyName = "items"; + public const string AdditionalPropertiesPropertyName = "additionalProperties"; + public const string EnumPropertyName = "enum"; + public const string NotPropertyName = "not"; + public const string AnyOfPropertyName = "anyOf"; + public const string ConstPropertyName = "const"; + public const string DefaultPropertyName = "default"; + public const string MinLengthPropertyName = "minLength"; 
+ public const string MaxLengthPropertyName = "maxLength"; } - private static class ThrowHelpers + private static partial class ThrowHelpers { [DoesNotReturn] public static void ThrowArgumentNullException(string name) => throw new ArgumentNullException(name); - [DoesNotReturn] - public static void ThrowNotSupportedException_ReferenceHandlerPreserveNotSupported() => - throw new NotSupportedException("Schema generation not supported with ReferenceHandler.Preserve enabled."); - [DoesNotReturn] public static void ThrowInvalidOperationException_TrimmedMethodParameters(MethodBase method) => throw new InvalidOperationException($"The parameters for method '{method}' have been trimmed away."); [DoesNotReturn] - public static void ThrowInvalidOperationException_MaxDepthReached() => - throw new InvalidOperationException("The maximum depth of the schema has been reached."); + public static void ThrowNotSupportedException_ReferenceHandlerPreserveNotSupported() => + throw new NotSupportedException("Schema generation not supported with ReferenceHandler.Preserve enabled."); } } diff --git a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapperConfiguration.cs b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapperConfiguration.cs index 2bffb91b0e0c..3e6edfec8b50 100644 --- a/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapperConfiguration.cs +++ b/dotnet/src/InternalUtilities/src/Schema/JsonSchemaMapperConfiguration.cs @@ -1,8 +1,12 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. +// Source copied from https://github.com/eiriktsarpalis/stj-schema-mapper +// It should be kept in sync with any changes made in that repo, +// and should be removed once the relevant replacements are available in STJv9. + using System; using System.ComponentModel; -using System.Diagnostics.CodeAnalysis; +using System.Text.Json.Nodes; namespace JsonSchemaMapper; @@ -10,28 +14,17 @@ namespace JsonSchemaMapper; /// Controls the behavior of the class. 
/// #if EXPOSE_JSON_SCHEMA_MAPPER - public +public #else -[ExcludeFromCodeCoverage] internal #endif -class JsonSchemaMapperConfiguration + class JsonSchemaMapperConfiguration { /// /// Gets the default configuration object used by . /// public static JsonSchemaMapperConfiguration Default { get; } = new(); - private readonly int _maxDepth = 64; - - /// - /// Determines whether schema references using JSON pointers should be generated for repeated complex types. - /// - /// - /// Defaults to . Should be left enabled if recursive types (e.g. trees, linked lists) are expected. - /// - public bool AllowSchemaReferences { get; init; } = true; - /// /// Determines whether the '$schema' property should be included in the root schema document. /// @@ -49,45 +42,25 @@ class JsonSchemaMapperConfiguration public bool ResolveDescriptionAttributes { get; init; } = true; /// - /// Determines the nullability behavior of reference types in the generated schema. + /// Specifies whether the type keyword should be included in enum type schemas. /// /// - /// Defaults to . Currently JsonSerializer - /// doesn't recognize non-nullable reference types (https://github.com/dotnet/runtime/issues/1256) - /// so the serializer will always treat them as nullable. Setting to - /// improves accuracy of the generated schema with respect to the actual serialization behavior but can result in more noise. + /// Defaults to false. /// - public ReferenceTypeNullability ReferenceTypeNullability { get; init; } = ReferenceTypeNullability.Annotated; + public bool IncludeTypeInEnums { get; init; } /// - /// Dtermines whether properties bound to non-optional constructor parameters should be flagged as required. + /// Determines whether non-nullable schemas should be generated for null oblivious reference types. /// /// - /// Defaults to true. 
Current STJ treats all constructor parameters as optional - /// (https://github.com/dotnet/runtime/issues/100075) so disabling this option - /// will generate schemas that are more compatible with the actual serialization behavior. + /// Defaults to . Due to restrictions in the run-time representation of nullable reference types + /// most occurrences are null oblivious and are treated as nullable by the serializer. A notable exception to that rule + /// are nullability annotations of field, property and constructor parameters which are represented in the contract metadata. /// - public bool RequireConstructorParameters { get; init; } = true; + public bool TreatNullObliviousAsNonNullable { get; init; } /// - /// Determines the maximum permitted depth when traversing the generated type graph. + /// Defines a callback that is invoked for every schema that is generated within the type graph. /// - /// Thrown when the value is less than 0. - /// - /// Defaults to 64. - /// - public int MaxDepth - { - get => _maxDepth; - init - { - if (value < 0) - { - Throw(); - static void Throw() => throw new ArgumentOutOfRangeException(nameof(value)); - } - - _maxDepth = value; - } - } + public Func? TransformSchemaNode { get; init; } } diff --git a/dotnet/src/InternalUtilities/src/Schema/KernelJsonSchemaBuilder.cs b/dotnet/src/InternalUtilities/src/Schema/KernelJsonSchemaBuilder.cs index 9fa11e616c5a..416b77485d2e 100644 --- a/dotnet/src/InternalUtilities/src/Schema/KernelJsonSchemaBuilder.cs +++ b/dotnet/src/InternalUtilities/src/Schema/KernelJsonSchemaBuilder.cs @@ -1,6 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Nodes; using System.Text.Json.Serialization; @@ -16,23 +18,43 @@ namespace Microsoft.SemanticKernel; // 1) Use the JSO from the Kernel used to create the KernelFunction when constructing the schema // 2) Check when the schema is being used (e.g. function calling) whether the JSO being used is equivalent to // whichever was used to build the schema, and if it's not, generate a new schema for that JSO - +[ExcludeFromCodeCoverage] internal static class KernelJsonSchemaBuilder { private static readonly JsonSerializerOptions s_options = CreateDefaultOptions(); - private static readonly JsonSchemaMapperConfiguration s_config = new() { IncludeSchemaVersion = false }; + private static readonly JsonSchemaMapperConfiguration s_config = new() + { + IncludeSchemaVersion = false, + IncludeTypeInEnums = true, + TreatNullObliviousAsNonNullable = true, + }; - public static KernelJsonSchema Build(JsonSerializerOptions? options, Type type, string? description = null) + public static KernelJsonSchema Build( + JsonSerializerOptions? options, + Type type, + string? description = null, + JsonSchemaMapperConfiguration? configuration = null) { - options ??= s_options; + var serializerOptions = options ?? s_options; + var mapperConfiguration = configuration ?? s_config; + + JsonNode jsonSchema = serializerOptions.GetJsonSchema(type, mapperConfiguration); + Debug.Assert(jsonSchema.GetValueKind() is JsonValueKind.Object or JsonValueKind.False or JsonValueKind.True); + + if (jsonSchema is not JsonObject jsonObj) + { + // Transform boolean schemas into object equivalents. + jsonObj = jsonSchema.GetValue() + ? 
new JsonObject() + : new JsonObject { ["not"] = true }; + } - JsonObject jsonObj = options.GetJsonSchema(type, s_config); if (!string.IsNullOrWhiteSpace(description)) { jsonObj["description"] = description; } - return KernelJsonSchema.Parse(JsonSerializer.Serialize(jsonObj, options)); + return KernelJsonSchema.Parse(jsonObj.ToJsonString(serializerOptions)); } private static JsonSerializerOptions CreateDefaultOptions() diff --git a/dotnet/src/InternalUtilities/src/Schema/README.md b/dotnet/src/InternalUtilities/src/Schema/README.md index 6a22bac7b896..0ddddcbd1ac1 100644 --- a/dotnet/src/InternalUtilities/src/Schema/README.md +++ b/dotnet/src/InternalUtilities/src/Schema/README.md @@ -1,5 +1,5 @@ The *.cs files in this folder, other than KernelJsonSchemaBuilder.cs, are a direct copy of the code at -https://github.com/eiriktsarpalis/stj-schema-mapper/tree/b7d7f5a3794e48c45e2b5b0ab050d89aabfc94d6/src/JsonSchemaMapper. +https://github.com/eiriktsarpalis/stj-schema-mapper/tree/94b6d9b979f1a80a1c305605dfc6de3b7a6fe78b/src/JsonSchemaMapper. They should be kept in sync with any changes made in that repo, and should be removed once the relevant replacements are available in System.Text.Json. EXPOSE_JSON_SCHEMA_MAPPER should _not_ be defined so as to keep all of the functionality internal. diff --git a/dotnet/src/InternalUtilities/src/Schema/ReferenceTypeNullability.cs b/dotnet/src/InternalUtilities/src/Schema/ReferenceTypeNullability.cs deleted file mode 100644 index d373e9eeba64..000000000000 --- a/dotnet/src/InternalUtilities/src/Schema/ReferenceTypeNullability.cs +++ /dev/null @@ -1,30 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. - -namespace JsonSchemaMapper; - -/// -/// Controls the nullable behavior of reference types in the generated schema. -/// -#if EXPOSE_JSON_SCHEMA_MAPPER - public -#else -internal -#endif -enum ReferenceTypeNullability -{ - /// - /// Always treat reference types as nullable. 
Follows the built-in behavior - /// of the serializer (cf. https://github.com/dotnet/runtime/issues/1256). - /// - AlwaysNullable, - - /// - /// Treat reference types as nullable only if they are annotated with a nullable reference type modifier. - /// - Annotated, - - /// - /// Always treat reference types as non-nullable. - /// - NeverNullable, -} diff --git a/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs b/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs index c8278b4b06e5..d9b10d4a559a 100644 --- a/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs +++ b/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs @@ -15,16 +15,10 @@ namespace Microsoft.SemanticKernel.Text; [ExcludeFromCodeCoverage] internal static class JsonOptionsCache { - /// Singleton for . - public static ReadOnlyMemoryConverter ReadOnlyMemoryConverter { get; } = new(); - /// /// Cached instance for reading and writing JSON using the default settings. /// - public static JsonSerializerOptions Default { get; } = new() - { - Converters = { ReadOnlyMemoryConverter }, - }; + public static JsonSerializerOptions Default { get; } = new(); /// /// Cached instance for writing JSON with indentation. @@ -32,7 +26,6 @@ internal static class JsonOptionsCache public static JsonSerializerOptions WriteIndented { get; } = new() { WriteIndented = true, - Converters = { ReadOnlyMemoryConverter }, }; /// @@ -44,6 +37,5 @@ internal static class JsonOptionsCache AllowTrailingCommas = true, PropertyNameCaseInsensitive = true, ReadCommentHandling = JsonCommentHandling.Skip, - Converters = { ReadOnlyMemoryConverter }, }; } diff --git a/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs b/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs deleted file mode 100644 index 1a754aa5524e..000000000000 --- a/dotnet/src/InternalUtilities/src/Text/ReadOnlyMemoryConverter.cs +++ /dev/null @@ -1,33 +0,0 @@ -๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
- -using System; -using System.Diagnostics.CodeAnalysis; -using System.Runtime.InteropServices; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace Microsoft.SemanticKernel.Text; - -// .NET 8 and the System.Text.Json v8.0.0 nuget package include built-in support for ReadOnlyMemory. -// This is a temporary workaround for .NET 6 and the System.Text.Json v6.0.0 nuget package. -// It should be removed once SK projects upgrade to System.Text.Json v8.0.0. - -/// Provides a converter for . -[ExcludeFromCodeCoverage] -internal sealed class ReadOnlyMemoryConverter : JsonConverter> -{ - /// An instance of a converter for float[] that all operations delegate to. - private static readonly JsonConverter s_arrayConverter = (JsonConverter)new JsonSerializerOptions().GetConverter(typeof(float[])); - - public override ReadOnlyMemory Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) => - s_arrayConverter.Read(ref reader, typeof(float[]), options).AsMemory(); - - public override void Write(Utf8JsonWriter writer, ReadOnlyMemory value, JsonSerializerOptions options) => - // This provides an efficient implementation when the ReadOnlyMemory represents the full length of an array. - // This is the common case for these projects, and thus the implementation doesn't spend more code on a complex - // implementation to efficiently handle slices or instances backed by MemoryManagers. - s_arrayConverter.Write( - writer, - MemoryMarshal.TryGetArray(value, out ArraySegment array) && array.Count == value.Length ? array.Array! : value.ToArray(), - options); -} diff --git a/dotnet/src/InternalUtilities/test/AssertExtensions.cs b/dotnet/src/InternalUtilities/test/AssertExtensions.cs index cf201d169366..4caf63589cbc 100644 --- a/dotnet/src/InternalUtilities/test/AssertExtensions.cs +++ b/dotnet/src/InternalUtilities/test/AssertExtensions.cs @@ -1,7 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
using System; -using Xunit; +using Assert = Xunit.Assert; namespace SemanticKernel.UnitTests; diff --git a/dotnet/src/InternalUtilities/test/MoqExtensions.cs b/dotnet/src/InternalUtilities/test/MoqExtensions.cs new file mode 100644 index 000000000000..8fb435e288f9 --- /dev/null +++ b/dotnet/src/InternalUtilities/test/MoqExtensions.cs @@ -0,0 +1,22 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using Microsoft.Extensions.Logging; +using Moq; + +#pragma warning disable CS8620 // Argument cannot be used for parameter due to differences in the nullability of reference types. + +internal static class MoqExtensions +{ + public static void VerifyLog(this Mock> logger, LogLevel logLevel, string message, Times times) + { + logger.Verify( + x => x.Log( + It.Is(l => l == logLevel), + It.IsAny(), + It.Is((v, t) => v.ToString()!.Contains(message)), + It.IsAny(), + It.IsAny>()), + times); + } +} diff --git a/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs index 5deb0c5dbd20..fe31efb14bae 100644 --- a/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs +++ b/dotnet/src/Planners/Planners.OpenAI/Stepwise/FunctionCallingStepwisePlanner.cs @@ -113,7 +113,7 @@ private async Task ExecuteCoreAsync( // Check for final answer in the function response foreach (OpenAIFunctionToolCall functionResponse in functionResponses) { - if (this.TryFindFinalAnswer(functionResponse, stepExecutionSettings.ToolCallBehavior, out string finalAnswer, out string? finalAnswerError)) + if (this.TryFindFinalAnswer(functionResponse, out string finalAnswer, out string? 
finalAnswerError)) { if (finalAnswerError is not null) { @@ -142,7 +142,7 @@ private async Task ExecuteCoreAsync( { // Execute function and add to result to chat history var result = (await clonedKernel.InvokeAsync(pluginFunction, arguments, cancellationToken).ConfigureAwait(false)).GetValue(); - chatHistoryForSteps.AddMessage(AuthorRole.Tool, ParseObjectAsString(result, stepExecutionSettings.ToolCallBehavior), metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, functionResponse.Id } }); + chatHistoryForSteps.AddMessage(AuthorRole.Tool, ParseObjectAsString(result), metadata: new Dictionary(1) { { OpenAIChatMessageContent.ToolIdProperty, functionResponse.Id } }); } catch (Exception ex) when (!ex.IsCriticalException()) { @@ -174,7 +174,7 @@ private async Task GetCompletionWithFunctionsAsync( ILogger logger, CancellationToken cancellationToken) { - openAIExecutionSettings.ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions; + openAIExecutionSettings.FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false); await this.ValidateTokenCountAsync(chatHistory, kernel, logger, openAIExecutionSettings, cancellationToken).ConfigureAwait(false); return await chatCompletion.GetChatMessageContentAsync(chatHistory, openAIExecutionSettings, kernel, cancellationToken).ConfigureAwait(false); @@ -241,7 +241,7 @@ private bool TryGetFunctionResponse(ChatMessageContent chatMessage, [NotNullWhen return functionResponses is { Count: > 0 }; } - private bool TryFindFinalAnswer(OpenAIFunctionToolCall functionResponse, ToolCallBehavior? toolCallBehavior, out string finalAnswer, out string? errorMessage) + private bool TryFindFinalAnswer(OpenAIFunctionToolCall functionResponse, out string finalAnswer, out string? 
errorMessage) { finalAnswer = string.Empty; errorMessage = null; @@ -250,7 +250,7 @@ private bool TryFindFinalAnswer(OpenAIFunctionToolCall functionResponse, ToolCal { if (functionResponse.Arguments is { Count: > 0 } arguments && arguments.TryGetValue("answer", out object? valueObj)) { - finalAnswer = ParseObjectAsString(valueObj, toolCallBehavior); + finalAnswer = ParseObjectAsString(valueObj); } else { @@ -261,7 +261,7 @@ private bool TryFindFinalAnswer(OpenAIFunctionToolCall functionResponse, ToolCal return false; } - private static string ParseObjectAsString(object? valueObj, ToolCallBehavior? toolCallBehavior) + private static string ParseObjectAsString(object? valueObj) { string resultStr = string.Empty; @@ -291,7 +291,7 @@ private static string ParseObjectAsString(object? valueObj, ToolCallBehavior? to else { #pragma warning disable CS0618 // Type or member is obsolete - resultStr = JsonSerializer.Serialize(valueObj, toolCallBehavior?.ToolCallResultSerializerOptions); + resultStr = JsonSerializer.Serialize(valueObj); #pragma warning restore CS0618 // Type or member is obsolete } @@ -310,7 +310,7 @@ private async Task ValidateTokenCountAsync( string functionManual = string.Empty; // If using functions, get the functions manual to include in token count estimate - if (openAIExecutionSettings.ToolCallBehavior == ToolCallBehavior.EnableKernelFunctions) + if (openAIExecutionSettings.FunctionChoiceBehavior is not null) { functionManual = await this.GetFunctionsManualAsync(kernel, logger, cancellationToken).ConfigureAwait(false); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs index a452d979c4f5..e96f1272b32f 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs @@ 
-48,7 +48,7 @@ public static Task> GetChatMessageContentsAsyn /// /// Get a single chat message content for the prompt and settings. /// - /// The target IChatCompletionSErvice interface to extend. + /// The target interface to extend. /// The standardized prompt input. /// The AI execution settings (optional). /// The containing services, plugins, and other state for use throughout the operation. @@ -66,7 +66,7 @@ public static async Task GetChatMessageContentAsync( /// /// Get a single chat message content for the chat history and settings provided. /// - /// The target IChatCompletionService interface to extend. + /// The target interface to extend. /// The chat history to complete. /// The AI execution settings (optional). /// The containing services, plugins, and other state for use throughout the operation. @@ -85,7 +85,7 @@ public static async Task GetChatMessageContentAsync( /// Get streaming chat message contents for the chat history provided using the specified settings. /// /// Throws if the specified type is not the same or fail to cast - /// The target IChatCompletionService interface to extend. + /// The target interface to extend. /// The standardized prompt input. /// The AI execution settings (optional). /// The containing services, plugins, and other state for use throughout the operation. 
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs index c9cae7acb070..12d63de28d3c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatPromptParser.cs @@ -75,7 +75,14 @@ private static ChatMessageContent ParseChatNode(PromptNode node) { if (childNode.TagName.Equals(ImageTagName, StringComparison.OrdinalIgnoreCase)) { - items.Add(new ImageContent(new Uri(childNode.Content!))); + if (childNode.Content!.StartsWith("data:", StringComparison.OrdinalIgnoreCase)) + { + items.Add(new ImageContent(childNode.Content)); + } + else + { + items.Add(new ImageContent(new Uri(childNode.Content!))); + } } else if (childNode.TagName.Equals(TextTagName, StringComparison.OrdinalIgnoreCase)) { diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehavior.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehavior.cs new file mode 100644 index 000000000000..cb08f8271a8c --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehavior.cs @@ -0,0 +1,72 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents a that provides either all of the 's plugins' functions to AI model to call or specified ones. +/// This behavior allows the model to decide whether to call the functions and, if so, which ones to call. +/// +internal sealed class AutoFunctionChoiceBehavior : FunctionChoiceBehavior +{ + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + private readonly bool _autoInvoke = true; + + /// + /// Initializes a new instance of the class. 
+ /// + [JsonConstructor] + public AutoFunctionChoiceBehavior() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// + /// Functions to provide to AI model. If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + /// The behavior options. + public AutoFunctionChoiceBehavior(IEnumerable? functions = null, bool autoInvoke = true, FunctionChoiceBehaviorOptions? options = null) : base(functions) + { + this.Functions = functions?.Select(f => FunctionName.ToFullyQualifiedName(f.Name, f.PluginName, FunctionNameSeparator)).ToList(); + this._autoInvoke = autoInvoke; + this.Options = options; + } + + /// + /// Fully qualified names of the functions to provide to AI model. + /// If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + [JsonPropertyName("functions")] + public IList? Functions { get; set; } + + /// + /// The behavior options. + /// + [JsonPropertyName("options")] + public FunctionChoiceBehaviorOptions? Options { get; set; } + + /// + public override FunctionChoiceBehaviorConfiguration GetConfiguration(FunctionChoiceBehaviorConfigurationContext context) + { + var functions = base.GetFunctions(this.Functions, context.Kernel, this._autoInvoke); + + return new FunctionChoiceBehaviorConfiguration(this.Options ?? 
DefaultOptions) + { + Choice = FunctionChoice.Auto, + Functions = functions, + AutoInvoke = this._autoInvoke, + }; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoice.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoice.cs new file mode 100644 index 000000000000..14daa4b303c5 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoice.cs @@ -0,0 +1,79 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents an AI model's decision-making strategy for calling functions, offering predefined choices: Auto, Required, and None. +/// Auto allows the model to decide if and which functions to call, Required enforces calling one or more functions, and None prevents any function calls, generating only a user-facing message. +/// +[Experimental("SKEXP0001")] +public readonly struct FunctionChoice : IEquatable +{ + /// + /// This choice instructs the model to decide whether to call the functions or not and, if so, which ones to call. + /// + public static FunctionChoice Auto { get; } = new("auto"); + + /// + /// This choice forces the model to always call one or more functions. The model will then select which function(s) to call. + /// + public static FunctionChoice Required { get; } = new("required"); + + /// + /// This behavior forces the model to not call any functions and only generate a user-facing message. + /// + public static FunctionChoice None { get; } = new("none"); + + /// + /// Gets the label associated with this FunctionChoice. + /// + public string Label { get; } + + /// + /// Creates a new FunctionChoice instance with the provided label. + /// + /// The label to associate with this FunctionChoice. 
+ public FunctionChoice(string label) + { + Verify.NotNullOrWhiteSpace(label, nameof(label)); + this.Label = label!; + } + + /// + /// Returns a value indicating whether two FunctionChoice instances are equivalent, as determined by a + /// case-insensitive comparison of their labels. + /// + /// the first FunctionChoice instance to compare + /// the second FunctionChoice instance to compare + /// true if left and right are both null or have equivalent labels; false otherwise + public static bool operator ==(FunctionChoice left, FunctionChoice right) + => left.Equals(right); + + /// + /// Returns a value indicating whether two FunctionChoice instances are not equivalent, as determined by a + /// case-insensitive comparison of their labels. + /// + /// the first FunctionChoice instance to compare + /// the second FunctionChoice instance to compare + /// false if left and right are both null or have equivalent labels; true otherwise + public static bool operator !=(FunctionChoice left, FunctionChoice right) + => !(left == right); + + /// + public override bool Equals([NotNullWhen(true)] object? obj) + => obj is FunctionChoice other && this == other; + + /// + public bool Equals(FunctionChoice other) + => string.Equals(this.Label, other.Label, StringComparison.OrdinalIgnoreCase); + + /// + public override int GetHashCode() + => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label); + + /// + public override string ToString() => this.Label; +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehavior.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehavior.cs new file mode 100644 index 000000000000..eb253809ca22 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehavior.cs @@ -0,0 +1,181 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the base class for different function choice behaviors. +/// These behaviors define the way functions are chosen by AI model and various aspects of their invocation by AI connectors. +/// +[Experimental("SKEXP0001")] +[JsonPolymorphic(TypeDiscriminatorPropertyName = "type")] +[JsonDerivedType(typeof(AutoFunctionChoiceBehavior), typeDiscriminator: "auto")] +[JsonDerivedType(typeof(RequiredFunctionChoiceBehavior), typeDiscriminator: "required")] +[JsonDerivedType(typeof(NoneFunctionChoiceBehavior), typeDiscriminator: "none")] +public abstract class FunctionChoiceBehavior +{ + /// The separator used to separate plugin name and function name. + protected const string FunctionNameSeparator = "."; + + /// The behavior default options. + protected static readonly FunctionChoiceBehaviorOptions DefaultOptions = new(); + + /// + /// List of the functions to provide to AI model. + /// + private readonly IEnumerable? _functions; + + /// + /// Creates a new instance of the class. + /// + internal FunctionChoiceBehavior() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// + /// Functions to provide to AI model. If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model. + /// + internal FunctionChoiceBehavior(IEnumerable? functions = null) + { + this._functions = functions; + } + + /// + /// Gets an instance of the that provides either all of the 's plugins' functions to the AI model to call or specified ones. + /// This behavior allows the model to decide whether to call the functions and, if so, which ones to call. + /// + /// + /// Functions to provide to the model. If null, all of the 's plugins' functions are provided to the model. 
+ /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + /// The behavior options. + /// An instance of one of the . + public static FunctionChoiceBehavior Auto(IEnumerable? functions = null, bool autoInvoke = true, FunctionChoiceBehaviorOptions? options = null) + { + return new AutoFunctionChoiceBehavior(functions, autoInvoke, options); + } + + /// + /// Gets an instance of the that provides either all of the 's plugins' functions to the AI model to call or specified ones. + /// This behavior forces the model to call the provided functions. SK connectors will invoke a requested function or multiple requested functions if the model requests multiple ones in one request, while handling the first request, and stop advertising the functions for the following requests to prevent the model from repeatedly calling the same function(s). + /// + /// + /// Functions to provide to the model. If null, all of the 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + /// The behavior options. + /// An instance of one of the . + public static FunctionChoiceBehavior Required(IEnumerable? functions = null, bool autoInvoke = true, FunctionChoiceBehaviorOptions? options = null) + { + return new RequiredFunctionChoiceBehavior(functions, autoInvoke, options); + } + + /// + /// Gets an instance of the that provides either all of the 's plugins' functions to AI model to call or specified ones but instructs it not to call any of them. + /// The model may use the provided function in the response it generates. E.g. the model may describe which functions it would call and with what parameter values. 
+ /// This response is useful if the user should first validate what functions the model will use. + /// + /// + /// Functions to provide to the model. If null, all of the 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model. + /// + /// The behavior options. + /// An instance of one of the . + public static FunctionChoiceBehavior None(IEnumerable? functions = null, FunctionChoiceBehaviorOptions? options = null) + { + return new NoneFunctionChoiceBehavior(functions, options); + } + + /// + /// Returns the configuration used by AI connectors to determine function choice and invocation behavior. + /// + /// The context provided by AI connectors, used to determine the configuration. + /// The configuration. +#pragma warning disable SKEXP0001 // FunctionChoiceBehavior is an experimental feature and is subject to change in future updates. Suppress this diagnostic to proceed. + public abstract FunctionChoiceBehaviorConfiguration GetConfiguration(FunctionChoiceBehaviorConfigurationContext context); +#pragma warning restore SKEXP0001 // FunctionChoiceBehavior is an experimental feature and is subject to change in future updates. Suppress this diagnostic to proceed. + + /// + /// Returns functions AI connector should provide to the AI model. + /// + /// Functions provided as fully qualified names. + /// The to be used for function calling. + /// Indicates whether the functions should be automatically invoked by the AI connector. + /// The configuration. + protected IReadOnlyList? GetFunctions(IList? functionFQNs, Kernel? kernel, bool autoInvoke) + { + // If auto-invocation is specified, we need a kernel to be able to invoke the functions. + // Lack of a kernel is fatal: we don't want to tell the model we can handle the functions + // and then fail to do so, so we fail before we get to that point. 
This is an error + // on the consumers behalf: if they specify auto-invocation with any functions, they must + // specify the kernel and the kernel must contain those functions. + if (autoInvoke && kernel is null) + { + throw new KernelException("Auto-invocation is not supported when no kernel is provided."); + } + + List? availableFunctions = null; + + if (functionFQNs is { Count: > 0 }) + { + availableFunctions = new List(functionFQNs.Count); + + foreach (var functionFQN in functionFQNs) + { + var nameParts = FunctionName.Parse(functionFQN, FunctionNameSeparator); + + // Look up the function in the kernel. + if (kernel is not null && kernel.Plugins.TryGetFunction(nameParts.PluginName, nameParts.Name, out var function)) + { + availableFunctions.Add(function); + continue; + } + + // If auto-invocation is requested and no function is found in the kernel, fail early. + if (autoInvoke) + { + throw new KernelException($"The specified function {functionFQN} is not available in the kernel."); + } + + // Look up the function in the list of functions provided as instances of KernelFunction. + function = this._functions?.FirstOrDefault(f => f.Name == nameParts.Name && f.PluginName == nameParts.PluginName); + if (function is not null) + { + availableFunctions.Add(function); + continue; + } + + throw new KernelException($"The specified function {functionFQN} was not found."); + } + } + // Disable function calling. + else if (functionFQNs is { Count: 0 }) + { + return availableFunctions; + } + // Provide all kernel functions. 
+ else if (kernel is not null) + { + foreach (var plugin in kernel.Plugins) + { + (availableFunctions ??= new List(kernel.Plugins.Count)).AddRange(plugin); + } + } + + return availableFunctions; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfiguration.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfiguration.cs new file mode 100644 index 000000000000..4e6664f4e7fc --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfiguration.cs @@ -0,0 +1,42 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents function choice behavior configuration produced by a . +/// +[Experimental("SKEXP0001")] +public sealed class FunctionChoiceBehaviorConfiguration +{ + /// + /// Creates a new instance of the class. + /// The behavior options. + /// + internal FunctionChoiceBehaviorConfiguration(FunctionChoiceBehaviorOptions options) + { + this.Options = options; + } + + /// + /// Represents an AI model's decision-making strategy for calling functions. + /// + public FunctionChoice Choice { get; internal init; } + + /// + /// The functions available for AI model. + /// + public IReadOnlyList? Functions { get; internal init; } + + /// + /// Indicates whether the functions should be automatically invoked by the AI connector. + /// + public bool AutoInvoke { get; set; } = true; + + /// + /// The behavior options. 
+ /// + public FunctionChoiceBehaviorOptions Options { get; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfigurationContext.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfigurationContext.cs new file mode 100644 index 000000000000..bfb0307e15c4 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorConfigurationContext.cs @@ -0,0 +1,37 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace Microsoft.SemanticKernel; + +/// +/// The context is to be provided by the choice behavior consumer โ€“ AI connector in order to obtain the choice behavior configuration. +/// +[Experimental("SKEXP0001")] +public sealed class FunctionChoiceBehaviorConfigurationContext +{ + /// + /// Creates a new instance of . + /// + /// History of the current chat session. + public FunctionChoiceBehaviorConfigurationContext(ChatHistory chatHistory) + { + this.ChatHistory = chatHistory; + } + + /// + /// History of the current chat session. + /// + public ChatHistory ChatHistory { get; } + + /// + /// The used by in the current chat session. + /// + public Kernel? Kernel { get; init; } + + /// + /// Request sequence index of automatic function invocation process. Starts from 0. + /// + public int RequestSequenceIndex { get; init; } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs new file mode 100644 index 000000000000..4bcf9d5c112c --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorOptions.cs @@ -0,0 +1,13 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents the options for a function choice behavior. At the moment this is empty but it is being included for future use. +/// +[Experimental("SKEXP0001")] +public sealed class FunctionChoiceBehaviorOptions +{ +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehavior.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehavior.cs new file mode 100644 index 000000000000..5baa8c8f1c93 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehavior.cs @@ -0,0 +1,64 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents that provides either all of the 's plugins' functions to AI model to call or specified ones but instructs it not to call any of them. +/// The model may use the provided function in the response it generates. E.g. the model may describe which functions it would call and with what parameter values. +/// This response is useful if the user should first validate what functions the model will use. +/// +internal sealed class NoneFunctionChoiceBehavior : FunctionChoiceBehavior +{ + /// + /// Initializes a new instance of the class. + /// + [JsonConstructor] + public NoneFunctionChoiceBehavior() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// + /// Functions to provide to AI model. If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model. + /// + /// The behavior options. + public NoneFunctionChoiceBehavior(IEnumerable? functions = null, FunctionChoiceBehaviorOptions? 
options = null) : base(functions) + { + this.Functions = functions?.Select(f => FunctionName.ToFullyQualifiedName(f.Name, f.PluginName, FunctionNameSeparator)).ToList(); + this.Options = options; + } + + /// + /// Fully qualified names of the functions to provide to AI model. + /// If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + [JsonPropertyName("functions")] + public IList? Functions { get; set; } + + /// + /// The behavior options. + /// + [JsonPropertyName("options")] + public FunctionChoiceBehaviorOptions? Options { get; set; } + + /// + public override FunctionChoiceBehaviorConfiguration GetConfiguration(FunctionChoiceBehaviorConfigurationContext context) + { + var functions = base.GetFunctions(this.Functions, context.Kernel, autoInvoke: false); + + return new FunctionChoiceBehaviorConfiguration(this.Options ?? DefaultOptions) + { + Choice = FunctionChoice.None, + Functions = functions, + AutoInvoke = false, + }; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehavior.cs b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehavior.cs new file mode 100644 index 000000000000..4196e0f89edb --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehavior.cs @@ -0,0 +1,89 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Represents that provides either all of the 's plugins' functions to AI model to call or specified ones. +/// This behavior forces the model to always call one or more functions. 
+/// +internal sealed class RequiredFunctionChoiceBehavior : FunctionChoiceBehavior +{ + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + private readonly bool _autoInvoke = true; + + /// + /// Initializes a new instance of the class. + /// + [JsonConstructor] + public RequiredFunctionChoiceBehavior() + { + } + + /// + /// Initializes a new instance of the class. + /// + /// + /// Functions to provide to AI model. If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + /// + /// Indicates whether the functions should be automatically invoked by AI connectors. + /// + /// The behavior options. + public RequiredFunctionChoiceBehavior( + IEnumerable? functions = null, + bool autoInvoke = true, + FunctionChoiceBehaviorOptions? options = null) : base(functions) + { + this.Functions = functions?.Select(f => FunctionName.ToFullyQualifiedName(f.Name, f.PluginName, FunctionNameSeparator)).ToList(); + this._autoInvoke = autoInvoke; + this.Options = options; + } + + /// + /// Fully qualified names of the functions to provide to AI model. + /// If null, all 's plugins' functions are provided to the model. + /// If empty, no functions are provided to the model, which is equivalent to disabling function calling. + /// + [JsonPropertyName("functions")] + public IList? Functions { get; set; } + + /// + /// The behavior options. + /// + [JsonPropertyName("options")] + public FunctionChoiceBehaviorOptions? Options { get; set; } + + /// + public override FunctionChoiceBehaviorConfiguration GetConfiguration(FunctionChoiceBehaviorConfigurationContext context) + { + // Stop advertising functions after the first request to prevent the AI model from repeatedly calling the same function. 
+ // This is a temporary solution which will be removed after we have a way to dynamically control list of functions to advertise to the model. + if (context.RequestSequenceIndex >= 1) + { + return new FunctionChoiceBehaviorConfiguration(this.Options ?? DefaultOptions) + { + Choice = FunctionChoice.Required, + Functions = null, + AutoInvoke = this._autoInvoke, + }; + } + + var functions = base.GetFunctions(this.Functions, context.Kernel, this._autoInvoke); + + IReadOnlyList? selectedFunctions = null; + + return new FunctionChoiceBehaviorConfiguration(this.Options ?? DefaultOptions) + { + Choice = FunctionChoice.Required, + Functions = selectedFunctions ?? functions, + AutoInvoke = this._autoInvoke, + }; + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index f10ccaa3ff39..ca838c4393c0 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -65,6 +65,43 @@ public string? ModelId } } + /// + /// Gets or sets the behavior defining the way functions are chosen by LLM and how they are invoked by AI connectors. + /// + /// + /// + /// To disable function calling, and have the model only generate a user-facing message, set the property to null (the default). + /// + /// To allow the model to decide whether to call the functions and, if so, which ones to call, set the property to an instance returned + /// by the method. + /// + /// + /// To force the model to always call one or more functions set the property to an instance returned + /// by the method. + /// + /// + /// To instruct the model to not call any functions and only generate a user-facing message, set the property to an instance returned + /// by the method. + /// + /// + /// For all the behaviors that presume the model to call functions, auto-invoke can be specified. 
If LLM + /// call a function and auto-invoke enabled, SK will attempt to resolve that function from the functions + /// available, and if found, rather than returning the response back to the caller, it will invoke the function automatically. + /// The intermediate messages will be retained in the provided . + /// + [JsonPropertyName("function_choice_behavior")] + [Experimental("SKEXP0001")] + public FunctionChoiceBehavior? FunctionChoiceBehavior + { + get => this._functionChoiceBehavior; + + set + { + this.ThrowIfFrozen(); + this._functionChoiceBehavior = value; + } + } + /// /// Extra properties that may be included in the serialized execution settings. /// @@ -112,12 +149,15 @@ public virtual void Freeze() /// public virtual PromptExecutionSettings Clone() { +#pragma warning disable SKEXP0001 // FunctionChoiceBehavior is an experimental feature and is subject to change in future updates. Suppress this diagnostic to proceed. return new() { ModelId = this.ModelId, ServiceId = this.ServiceId, + FunctionChoiceBehavior = this.FunctionChoiceBehavior, ExtensionData = this.ExtensionData is not null ? new Dictionary(this.ExtensionData) : null }; +#pragma warning restore SKEXP0001 // FunctionChoiceBehavior is an experimental feature and is subject to change in future updates. Suppress this diagnostic to proceed. } /// @@ -137,6 +177,7 @@ protected void ThrowIfFrozen() private string? _modelId; private IDictionary? _extensionData; private string? _serviceId; + private FunctionChoiceBehavior? _functionChoiceBehavior; #endregion } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs index c4c967445a6b..3eb2d890aa54 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/ITextToImageService.cs @@ -1,5 +1,6 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Threading; using System.Threading.Tasks; @@ -14,19 +15,17 @@ namespace Microsoft.SemanticKernel.TextToImage; public interface ITextToImageService : IAIService { /// - /// Generate an image matching the given description + /// Given a prompt and/or an input text, the model will generate a new image. /// - /// Image description - /// Image width in pixels - /// Image height in pixels + /// Input text for image generation + /// Text to image execution settings /// The containing services, plugins, and other state for use throughout the operation. /// The to monitor for cancellation requests. The default is . - /// Generated image in base64 format or image URL + /// Generated image contents [Experimental("SKEXP0001")] - public Task GenerateImageAsync( - string description, - int width, - int height, + public Task> GetImageContentsAsync( + TextContent input, + PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/TextToImageServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/TextToImageServiceExtensions.cs new file mode 100644 index 000000000000..26945f32c4a4 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextToImage/TextToImageServiceExtensions.cs @@ -0,0 +1,44 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace Microsoft.SemanticKernel.TextToImage; + +/// +/// Extension methods for . +/// +public static class TextToImageServiceExtensions +{ + /// + /// Given a prompt and/or an input text, the model will generate a new image. 
+ /// + /// Target instance + /// Image generation prompt + /// Image width in pixels + /// Image height in pixels + /// The containing services, plugins, and other state for use throughout the operation. + /// The to monitor for cancellation requests. The default is . + /// Generated image in base64 format or image URL + public static async Task GenerateImageAsync(this ITextToImageService service, + string description, + int width, + int height, + Kernel? kernel = null, + CancellationToken cancellationToken = default) + { + var imageJson = $$""" + { + "width": {{width}}, + "height": {{height}} + } + """; + + var executionSettings = JsonSerializer.Deserialize(imageJson); + + var result = await service.GetImageContentsAsync(new TextContent(description), executionSettings, kernel, cancellationToken).ConfigureAwait(false); + + return result[0].Uri!.ToString(); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml new file mode 100644 index 000000000000..f0c61ea95587 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/CompatibilitySuppressions.xml @@ -0,0 +1,32 @@ +๏ปฟ + + + + CP0002 + M:Microsoft.SemanticKernel.TextToImage.ITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0002 + M:Microsoft.SemanticKernel.TextToImage.ITextToImageService.GenerateImageAsync(System.String,System.Int32,System.Int32,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0006 + 
M:Microsoft.SemanticKernel.TextToImage.ITextToImageService.GetImageContentsAsync(Microsoft.SemanticKernel.TextContent,Microsoft.SemanticKernel.PromptExecutionSettings,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + lib/net8.0/Microsoft.SemanticKernel.Abstractions.dll + true + + + CP0006 + M:Microsoft.SemanticKernel.TextToImage.ITextToImageService.GetImageContentsAsync(Microsoft.SemanticKernel.TextContent,Microsoft.SemanticKernel.PromptExecutionSettings,Microsoft.SemanticKernel.Kernel,System.Threading.CancellationToken) + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + lib/netstandard2.0/Microsoft.SemanticKernel.Abstractions.dll + true + + \ No newline at end of file diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs index f9e6f9f3d71f..f751ea6fc448 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/AnnotationContent.cs @@ -20,8 +20,7 @@ public class AnnotationContent : KernelContent /// /// The citation. /// - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Quote { get; init; } + public string Quote { get; init; } = string.Empty; /// /// Start index of the citation. @@ -43,13 +42,17 @@ public AnnotationContent() /// /// Initializes a new instance of the class. /// + /// The source text being referenced. /// The model ID used to generate the content. - /// Inner content, + /// Inner content /// Additional metadata public AnnotationContent( + string quote, string? modelId = null, object? innerContent = null, IReadOnlyDictionary? 
metadata = null) : base(innerContent, modelId, metadata) - { } + { + this.Quote = quote; + } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs index 16ac0cd7828e..925d74d0c731 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FileReferenceContent.cs @@ -28,7 +28,7 @@ public FileReferenceContent() /// /// The identifier of the referenced file. /// The model ID used to generate the content. - /// Inner content, + /// Inner content /// Additional metadata public FileReferenceContent( string fileId, diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContent.cs index 94c0109fe807..5530d568b562 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContent.cs @@ -11,7 +11,7 @@ namespace Microsoft.SemanticKernel; /// -/// Represents a function call requested by LLM. +/// Represents a function call requested by AI model. /// [Experimental("SKEXP0001")] public sealed class FunctionCallContent : KernelContent @@ -40,7 +40,7 @@ public sealed class FunctionCallContent : KernelContent public KernelArguments? Arguments { get; } /// - /// The exception that occurred while mapping original LLM function call to the model class. + /// The exception that occurred while mapping original AI model function call to the model class. /// [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public Exception? 
Exception { get; init; } @@ -75,7 +75,7 @@ public async Task InvokeAsync(Kernel kernel, Cancellation if (this.Exception is not null) { - return new FunctionResultContent(this, this.Exception.Message); + throw this.Exception; } if (kernel.Plugins.TryGetFunction(this.PluginName, this.FunctionName, out KernelFunction? function)) diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs index 183542021705..8dbcc00eb25d 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/KernelContent.cs @@ -9,13 +9,14 @@ namespace Microsoft.SemanticKernel; /// /// Base class for all AI non-streaming results /// -[JsonPolymorphic(TypeDiscriminatorPropertyName = "$type")] +[JsonPolymorphic(TypeDiscriminatorPropertyName = "$type", UnknownDerivedTypeHandling = JsonUnknownDerivedTypeHandling.FallBackToNearestAncestor)] [JsonDerivedType(typeof(TextContent), typeDiscriminator: nameof(TextContent))] [JsonDerivedType(typeof(ImageContent), typeDiscriminator: nameof(ImageContent))] [JsonDerivedType(typeof(FunctionCallContent), typeDiscriminator: nameof(FunctionCallContent))] [JsonDerivedType(typeof(FunctionResultContent), typeDiscriminator: nameof(FunctionResultContent))] [JsonDerivedType(typeof(BinaryContent), typeDiscriminator: nameof(BinaryContent))] [JsonDerivedType(typeof(AudioContent), typeDiscriminator: nameof(AudioContent))] +[JsonDerivedType(typeof(ChatMessageContent), typeDiscriminator: nameof(ChatMessageContent))] #pragma warning disable SKEXP0110 [JsonDerivedType(typeof(AnnotationContent), typeDiscriminator: nameof(AnnotationContent))] [JsonDerivedType(typeof(FileReferenceContent), typeDiscriminator: nameof(FileReferenceContent))] diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs new file mode 100644 
index 000000000000..609f94a87180 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingAnnotationContent.cs @@ -0,0 +1,78 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Content type to support message annotations. +/// +[Experimental("SKEXP0110")] +public class StreamingAnnotationContent : StreamingKernelContent +{ + /// + /// The file identifier. + /// + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? FileId { get; init; } + + /// + /// The citation. + /// + public string Quote { get; init; } = string.Empty; + + /// + /// Start index of the citation. + /// + public int StartIndex { get; init; } + + /// + /// End index of the citation. + /// + public int EndIndex { get; init; } + + /// + /// Initializes a new instance of the class. + /// + [JsonConstructor] + public StreamingAnnotationContent() + { } + + /// + /// Initializes a new instance of the class. + /// + /// The source text being referenced. + /// The model ID used to generate the content. + /// Inner content + /// Additional metadata + public StreamingAnnotationContent( + string quote, + string? modelId = null, + object? innerContent = null, + IReadOnlyDictionary? 
metadata = null) + : base(innerContent, choiceIndex: 0, modelId, metadata) + { + this.Quote = quote; + } + + /// + public override string ToString() + { + bool hasFileId = !string.IsNullOrEmpty(this.FileId); + + if (hasFileId) + { + return $"{this.Quote}: {this.FileId}"; + } + + return this.Quote; + } + + /// + public override byte[] ToByteArray() + { + return Encoding.UTF8.GetBytes(this.ToString()); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFileReferenceContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFileReferenceContent.cs new file mode 100644 index 000000000000..83b76946ef8e --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFileReferenceContent.cs @@ -0,0 +1,55 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Text; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel; + +/// +/// Content type to support file references. +/// +[Experimental("SKEXP0110")] +public class StreamingFileReferenceContent : StreamingKernelContent +{ + /// + /// The file identifier. + /// + public string FileId { get; init; } = string.Empty; + + /// + /// Initializes a new instance of the class. + /// + [JsonConstructor] + public StreamingFileReferenceContent() + { } + + /// + /// Initializes a new instance of the class. + /// + /// The identifier of the referenced file. + /// The model ID used to generate the content. + /// Inner content + /// Additional metadata + public StreamingFileReferenceContent( + string fileId, + string? modelId = null, + object? innerContent = null, + IReadOnlyDictionary? 
metadata = null) + : base(innerContent, choiceIndex: 0, modelId, metadata) + { + this.FileId = fileId; + } + + /// + public override string ToString() + { + return this.FileId; + } + + /// + public override byte[] ToByteArray() + { + return Encoding.UTF8.GetBytes(this.ToString()); + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs index b8c3867ff358..558ab739d279 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Contents/TextContent.cs @@ -57,4 +57,13 @@ public override string ToString() { return this.Text ?? string.Empty; } + + /// + /// When converting a string to a , the content is automatically set to the string value. + /// + /// Text content + public static implicit operator TextContent(string text) + { + return new TextContent(text); + } } diff --git a/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/DistanceFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/DistanceFunction.cs index 32601243966b..cf8b459b2472 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/DistanceFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/DistanceFunction.cs @@ -54,6 +54,19 @@ public static class DistanceFunction /// public const string EuclideanDistance = nameof(EuclideanDistance); + /// + /// Measures the Euclidean squared distance between two vectors. + /// + /// + /// Also known as l2-squared. + /// + public const string EuclideanSquaredDistance = nameof(EuclideanSquaredDistance); + + /// + /// Number of differences between vectors at each dimensions. + /// + public const string Hamming = nameof(Hamming); + /// /// Measures the Manhattan distance between two vectors. 
/// diff --git a/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/IndexKind.cs b/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/IndexKind.cs index 364baaa8e727..88f14b7ef67c 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/IndexKind.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Data/RecordDefinition/IndexKind.cs @@ -33,4 +33,26 @@ public static class IndexKind /// Better with smaller datasets. /// public const string Flat = nameof(Flat); + + /// + /// Inverted File with Flat Compression. Designed to enhance search efficiency by narrowing the search area through the use of neighbor partitions or clusters. + /// Also referred to as approximate nearest neighbor (ANN) search. + /// + public const string IvfFlat = nameof(IvfFlat); + + /// + /// Disk-based Approximate Nearest Neighbor algorithm designed for efficiently searching for approximate nearest neighbors (ANN) in high-dimensional spaces. + /// The primary focus of DiskANN is to handle large-scale datasets that cannot fit entirely into memory, leveraging disk storage to store the data while maintaining fast search times. + /// + public const string DiskAnn = nameof(DiskAnn); + + /// + /// Index that compresses vectors using DiskANN-based quantization methods for better efficiency in the kNN search. + /// + public const string QuantizedFlat = nameof(QuantizedFlat); + + /// + /// Dynamic index allows to automatically switch from to indexes. + /// + public const string Dynamic = nameof(Dynamic); } diff --git a/dotnet/src/SemanticKernel.Abstractions/Data/VectorStoreGenericDataModel.cs b/dotnet/src/SemanticKernel.Abstractions/Data/VectorStoreGenericDataModel.cs new file mode 100644 index 000000000000..cccec7f5b5d6 --- /dev/null +++ b/dotnet/src/SemanticKernel.Abstractions/Data/VectorStoreGenericDataModel.cs @@ -0,0 +1,34 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.SemanticKernel.Data; + +/// +/// A generic data model that can be used to store and retrieve any data from a vector store. +/// +/// The data type of the record key. +/// The key of the record. +[Experimental("SKEXP0001")] +public sealed class VectorStoreGenericDataModel(TKey key) + where TKey : notnull +{ + /// + /// Gets or sets the key of the record. + /// + public TKey Key { get; set; } = key; + + /// + /// Gets or sets a dictionary of data items stored in the record. + /// + /// + /// This dictionary contains all fields that are not vectors. + /// + public Dictionary Data { get; init; } = new(); + + /// + /// Gets or sets a dictionary of vectors stored in the record. + /// + public Dictionary Vectors { get; init; } = new(); +} diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs index b838d7b30261..149dbf108ece 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs @@ -37,7 +37,7 @@ public abstract class KernelFunction private static readonly Histogram s_invocationDuration = s_meter.CreateHistogram( name: "semantic_kernel.function.invocation.duration", unit: "s", - description: "Measures the duration of a functionโ€™s execution"); + description: "Measures the duration of a function's execution"); /// to record function streaming duration. /// @@ -47,7 +47,7 @@ public abstract class KernelFunction private static readonly Histogram s_streamingDuration = s_meter.CreateHistogram( name: "semantic_kernel.function.streaming.duration", unit: "s", - description: "Measures the duration of a functionโ€™s streaming execution"); + description: "Measures the duration of a function's streaming execution"); /// /// Gets the name of the function. 
diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs index 9de192858eb0..f28478db7a69 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryQueryResult.cs @@ -3,7 +3,6 @@ using System; using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Memory; @@ -27,7 +26,6 @@ public class MemoryQueryResult /// /// Nullable embedding associated with the metadata returned for by a query. /// - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory? Embedding { get; } /// diff --git a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs index 1a95ee13dbe0..c4b0f09289bc 100644 --- a/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs +++ b/dotnet/src/SemanticKernel.Abstractions/Memory/MemoryRecord.cs @@ -4,7 +4,6 @@ using System.Diagnostics.CodeAnalysis; using System.Text.Json; using System.Text.Json.Serialization; -using Microsoft.SemanticKernel.Text; namespace Microsoft.SemanticKernel.Memory; @@ -18,7 +17,6 @@ public class MemoryRecord : DataEntryBase /// Source content embeddings. 
/// [JsonPropertyName("embedding")] - [JsonConverter(typeof(ReadOnlyMemoryConverter))] public ReadOnlyMemory Embedding { get; } /// diff --git a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj index 81e196b63b91..2c2ed1b1aad1 100644 --- a/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj +++ b/dotnet/src/SemanticKernel.Abstractions/SemanticKernel.Abstractions.csproj @@ -8,6 +8,10 @@ true + + rc + + diff --git a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs index decfa8ef20ea..22659d455e0f 100644 --- a/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs +++ b/dotnet/src/SemanticKernel.Core/Data/VolatileVectorStoreRecordCollection.cs @@ -1,10 +1,8 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. -using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -using System.Linq; using System.Reflection; using System.Runtime.CompilerServices; using System.Threading; @@ -50,16 +48,10 @@ public VolatileVectorStoreRecordCollection(string collectionName, VolatileVector this._collectionName = collectionName; this._internalCollection = new(); this._options = options ?? new VolatileVectorStoreRecordCollectionOptions(); - var vectorStoreRecordDefinition = this._options.VectorStoreRecordDefinition ?? VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(TRecord), true); + var vectorStorePropertyReader = new VectorStoreRecordPropertyReader(typeof(TRecord), this._options.VectorStoreRecordDefinition, new() { RequiresAtLeastOneVector = false, SupportsMultipleKeys = false, SupportsMultipleVectors = true }); // Get the key property info. 
- var keyProperty = vectorStoreRecordDefinition.Properties.OfType().FirstOrDefault(); - if (keyProperty is null) - { - throw new ArgumentException($"No Key property found on {typeof(TRecord).Name} or provided via {nameof(VectorStoreRecordDefinition)}"); - } - - this._keyPropertyInfo = typeof(TRecord).GetProperty(keyProperty.DataModelPropertyName) ?? throw new ArgumentException($"Key property {keyProperty.DataModelPropertyName} not found on {typeof(TRecord).Name}"); + this._keyPropertyInfo = vectorStorePropertyReader.KeyPropertyInfo; } /// diff --git a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj index 7eeee98743d5..ff9c1e8986c4 100644 --- a/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj +++ b/dotnet/src/SemanticKernel.Core/SemanticKernel.Core.csproj @@ -11,6 +11,10 @@ true + + rc + + diff --git a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj index cd5be49a67cb..86cbde81153c 100644 --- a/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj +++ b/dotnet/src/SemanticKernel.MetaPackage/SemanticKernel.MetaPackage.csproj @@ -4,6 +4,9 @@ $(AssemblyName) net8.0;netstandard2.0 + + rc + @@ -13,6 +16,6 @@ Empowers app owners to integrate cutting-edge LLM technology quickly and easily - + - \ No newline at end of file + diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehaviorTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..ba6cb5584dd2 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/AutoFunctionChoiceBehaviorTests.cs @@ -0,0 +1,273 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.FunctionChoiceBehaviors; + +/// +/// Unit tests for +/// +public sealed class AutoFunctionChoiceBehaviorTests +{ + private readonly Kernel _kernel; + + public AutoFunctionChoiceBehaviorTests() + { + this._kernel = new Kernel(); + } + + [Fact] + public void ItShouldAdvertiseAllKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedViaConstructor() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedInFunctionsProperty() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior() + { + Functions = ["MyPlugin.Function1", "MyPlugin.Function2"] + }; + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel 
= this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedViaConstructorForManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior([plugin.ElementAt(0), plugin.ElementAt(1)], autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseAllKernelFunctionsForManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void ItShouldAllowAutoInvocationByDefault() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.True(config.AutoInvoke); + } + + [Fact] + public void 
ItShouldAllowManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.False(config.AutoInvoke); + } + + [Fact] + public void ItShouldInitializeFunctionPropertyByFunctionsPassedViaConstructor() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + // Assert + Assert.NotNull(choiceBehavior.Functions); + Assert.Equal(2, choiceBehavior.Functions.Count); + + Assert.Equal("MyPlugin.Function1", choiceBehavior.Functions.ElementAt(0)); + Assert.Equal("MyPlugin.Function2", choiceBehavior.Functions.ElementAt(1)); + } + + [Fact] + public void ItShouldThrowExceptionIfAutoInvocationRequestedButNoKernelIsProvided() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + var choiceBehavior = new AutoFunctionChoiceBehavior(); + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = null }); + }); + + Assert.Equal("Auto-invocation is not supported when no kernel is provided.", exception.Message); + } + + [Fact] + public void ItShouldThrowExceptionIfAutoInvocationRequestedAndFunctionIsNotRegisteredInKernel() + { + // Arrange + var plugin = GetTestPlugin(); + + var choiceBehavior = new AutoFunctionChoiceBehavior(functions: [plugin.ElementAt(0)]); + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + }); + + Assert.Equal("The specified function MyPlugin.Function1 is not available in the kernel.", exception.Message); + } + + [Fact] + public void 
ItShouldThrowExceptionIfNoFunctionFoundAndManualInvocationIsRequested() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + var choiceBehavior = new AutoFunctionChoiceBehavior(autoInvoke: false) + { + Functions = ["MyPlugin.NonKernelFunction"] + }; + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + }); + + Assert.Equal("The specified function MyPlugin.NonKernelFunction was not found.", exception.Message); + } + + [Fact] + public void ItShouldPropagateOptionsToConfiguration() + { + // Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(autoInvoke: false, options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + [Fact] + public void ItShouldUseDefaultOptionsIfNoneAreProvided() + { + // Act + var choiceBehavior = new AutoFunctionChoiceBehavior(autoInvoke: false); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.NotNull(configuration.Options); + } + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs new file mode 100644 index 000000000000..197640eca0f0 --- 
/dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorDeserializationTests.cs @@ -0,0 +1,281 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.FunctionChoiceBehaviors; + +public class FunctionChoiceBehaviorDeserializationTests +{ + private readonly Kernel _kernel; + + public FunctionChoiceBehaviorDeserializationTests() + { + var plugin = GetTestPlugin(); + + this._kernel = new Kernel(); + this._kernel.Plugins.Add(plugin); + } + + [Fact] + public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromJsonWithNoFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "auto" + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeAutoFunctionChoiceBehaviorFromJsonWithEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "auto", + "functions": [] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void 
ItShouldDeserializeAutoFunctionChoiceBehaviorFromJsonWithNotEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "auto", + "functions": ["MyPlugin.Function1", "MyPlugin.Function3"] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Auto, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromJsonWithNoFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "required" + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromJsonWithEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "required", + "functions": [] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + 
Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void ItShouldDeserializeRequiredFunctionChoiceBehaviorFromJsonWithNotEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "required", + "functions": ["MyPlugin.Function1", "MyPlugin.Function3"] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.Required, config.Choice); + + Assert.True(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromJsonWithNoFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "none" + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromJsonWithEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "none", + "functions": [] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act 
+ var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.Null(config?.Functions); + } + + [Fact] + public void ItShouldDeserializedNoneFunctionChoiceBehaviorFromJsonWithNotEmptyFunctionsProperty() + { + // Arrange + var json = """ + { + "type": "none", + "functions": ["MyPlugin.Function1", "MyPlugin.Function3"] + } + """; + + var sut = JsonSerializer.Deserialize(json); + + // Act + var config = sut!.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.Equal(FunctionChoice.None, config.Choice); + + Assert.False(config.AutoInvoke); + + Assert.NotNull(config?.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.PluginName == "MyPlugin" && f.Name == "Function3"); + } + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..0197768c4227 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceBehaviorTests.cs @@ -0,0 +1,347 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Functions; + +/// +/// Unit tests for +/// +public sealed class FunctionChoiceBehaviorTests +{ + private readonly Kernel _kernel; + + public FunctionChoiceBehaviorTests() + { + this._kernel = new Kernel(); + } + + [Fact] + public void AutoFunctionChoiceShouldBeUsed() + { + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(); + + // Assert + Assert.IsType(choiceBehavior); + } + + [Fact] + public void RequiredFunctionChoiceShouldBeUsed() + { + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(); + + // Assert + Assert.IsType(choiceBehavior); + } + + [Fact] + public void NoneFunctionChoiceShouldBeUsed() + { + // Act + var choiceBehavior = FunctionChoiceBehavior.None(); + + // Assert + Assert.IsType(choiceBehavior); + } + + [Fact] + public void AutoFunctionChoiceShouldAdvertiseKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(functions: null); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void AutoFunctionChoiceShouldAdvertiseProvidedFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, 
config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void AutoFunctionChoiceShouldAllowAutoInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: true); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.True(config.AutoInvoke); + } + + [Fact] + public void AutoFunctionChoiceShouldAllowManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.False(config.AutoInvoke); + } + + [Fact] + public void RequiredFunctionChoiceShouldAdvertiseKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(functions: null); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void RequiredFunctionChoiceShouldAdvertiseProvidedFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + var config = 
choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void RequiredFunctionChoiceShouldAllowAutoInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.True(config.AutoInvoke); + } + + //[Fact] + //This test should be uncommented when the solution to dynamically control list of functions to advertise to the model is implemented. + //public void RequiredFunctionChoiceShouldReturnNoFunctionAsSpecifiedByFunctionsSelector() + //{ + // // Arrange + // var plugin = GetTestPlugin(); + // this._kernel.Plugins.Add(plugin); + + // static IReadOnlyList? FunctionsSelector(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // return []; + // } + + // // Act + // var choiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: FunctionsSelector); + + // var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // // Assert + // Assert.NotNull(config.Functions); + // Assert.Empty(config.Functions); + //} + + //[Fact] + //This test should be uncommented when the solution to dynamically control the list of functions to advertise to the model is implemented. + //public void RequiredFunctionChoiceShouldReturnFunctionsAsSpecifiedByFunctionsSelector() + //{ + // // Arrange + // var plugin = GetTestPlugin(); + // this._kernel.Plugins.Add(plugin); + + // static IReadOnlyList? 
FunctionsSelector(FunctionChoiceBehaviorFunctionsSelectorContext context) + // { + // return context.Functions!.Where(f => f.Name == "Function1").ToList(); + // } + + // // Act + // var choiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: true, functionsSelector: FunctionsSelector); + + // var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // // Assert + // Assert.NotNull(config?.Functions); + // Assert.Single(config.Functions); + // Assert.Equal("Function1", config.Functions[0].Name); + //} + + [Fact] + public void RequiredFunctionChoiceShouldAllowManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.False(config.AutoInvoke); + } + + [Fact] + public void NoneFunctionChoiceShouldAdvertiseProvidedFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + + // Act + var choiceBehavior = FunctionChoiceBehavior.None([plugin.ElementAt(0), plugin.ElementAt(2)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void NoneFunctionChoiceShouldAdvertiseAllKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = FunctionChoiceBehavior.None(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, 
config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void FunctionChoiceBehaviorShouldPassOptionsToAutoFunctionChoiceBehaviorClass() + { + // Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false, options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + [Fact] + public void FunctionChoiceBehaviorShouldPassOptionsToRequiredFunctionChoiceBehaviorClass() + { + // Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = FunctionChoiceBehavior.Required(autoInvoke: false, options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + [Fact] + public void FunctionChoiceBehaviorShouldPassOptionsToNoneFunctionChoiceBehaviorClass() + { + // Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = FunctionChoiceBehavior.None(options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git 
a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceTests.cs new file mode 100644 index 000000000000..359ea11ce4de --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/FunctionChoiceTests.cs @@ -0,0 +1,106 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.FunctionChoiceBehaviors; + +public class FunctionChoiceTests +{ + [Fact] + public void ItShouldInitializeLabelForAutoFunctionChoice() + { + // Act + var choice = FunctionChoice.Auto; + + // Assert + Assert.Equal("auto", choice.Label); + } + + [Fact] + public void ItShouldInitializeLabelForRequiredFunctionChoice() + { + // Act + var choice = FunctionChoice.Required; + + // Assert + Assert.Equal("required", choice.Label); + } + + [Fact] + public void ItShouldInitializeLabelForNoneFunctionChoice() + { + // Act + var choice = FunctionChoice.None; + + // Assert + Assert.Equal("none", choice.Label); + } + + [Fact] + public void ItShouldCheckTwoChoicesAreEqual() + { + // Arrange + var choice1 = FunctionChoice.Auto; + var choice2 = FunctionChoice.Auto; + + // Act & Assert + Assert.True(choice1 == choice2); + } + + [Fact] + public void ItShouldCheckTwoChoicesAreNotEqual() + { + // Arrange + var choice1 = FunctionChoice.Auto; + var choice2 = FunctionChoice.Required; + + // Act & Assert + Assert.False(choice1 == choice2); + } + + [Fact] + public void ItShouldCheckChoiceIsEqualToItself() + { + // Arrange + var choice = FunctionChoice.Auto; + + // Act & Assert +#pragma warning disable CS1718 // Comparison made to same variable + Assert.True(choice == choice); +#pragma warning restore CS1718 // Comparison made to same variable + } + + [Fact] + public void ItShouldCheckChoiceIsNotEqualToDifferentType() + { + // Arrange + var choice = FunctionChoice.Auto; + + // Act & Assert + 
Assert.False(choice.Equals("auto")); + } + + [Fact] + public void ItShouldCheckChoiceIsNotEqualToNull() + { + // Arrange + var choice = FunctionChoice.Auto; + + // Act & Assert + Assert.False(choice.Equals(null)); + } + + [Fact] + public void ToStringShouldReturnLabel() + { + // Arrange + var choice = FunctionChoice.Auto; + + // Act + var result = choice.ToString(); + + // Assert + Assert.Equal("auto", result); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehaviorTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..c2f3dc37b06a --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/NoneFunctionChoiceBehaviorTests.cs @@ -0,0 +1,112 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.FunctionChoiceBehaviors; + +/// +/// Unit tests for +/// +public sealed class NoneFunctionChoiceBehaviorTests +{ + private readonly Kernel _kernel; + + public NoneFunctionChoiceBehaviorTests() + { + this._kernel = new Kernel(); + } + + [Fact] + public void ItShouldAdvertiseKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new NoneFunctionChoiceBehavior(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void ItShouldAdvertiseFunctionsIfSpecified() + { + // Arrange + var plugin = GetTestPlugin(); + + // Act + var choiceBehavior = 
new NoneFunctionChoiceBehavior([plugin.ElementAt(0), plugin.ElementAt(2)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void ItShouldNotAllowAutoInvocation() + { + // Arrange + var choiceBehavior = new NoneFunctionChoiceBehavior(); + + // Act + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.False(config.AutoInvoke); + } + + [Fact] + public void ItShouldPropagateOptionsToConfiguration() + { + // Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = new NoneFunctionChoiceBehavior(options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + [Fact] + public void ItShouldPropagateDefaultOptionsIfNoneAreProvided() + { + // Arrange & Act + var choiceBehavior = new NoneFunctionChoiceBehavior(options: null); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.NotNull(configuration.Options); + } + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git 
a/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehaviorTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehaviorTests.cs new file mode 100644 index 000000000000..aa86a7ad9c3f --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/AI/FunctionChoiceBehaviors/RequiredFunctionChoiceBehaviorTests.cs @@ -0,0 +1,305 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.Linq; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.AI.FunctionChoiceBehaviors; + +/// +/// Unit tests for +/// +public sealed class RequiredFunctionChoiceBehaviorTests +{ + private readonly Kernel _kernel; + + public RequiredFunctionChoiceBehaviorTests() + { + this._kernel = new Kernel(); + } + + [Fact] + public void ItShouldAdvertiseAllKernelFunctions() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedViaConstructor() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name 
== "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedInFunctionsProperty() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior() + { + Functions = ["MyPlugin.Function1", "MyPlugin.Function2"] + }; + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseOnlyFunctionsSuppliedViaConstructorForManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior([plugin.ElementAt(0), plugin.ElementAt(1)], autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + + [Fact] + public void ItShouldAdvertiseAllKernelFunctionsForManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + + Assert.NotNull(config.Functions); + Assert.Equal(3, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + 
Assert.Contains(config.Functions, f => f.Name == "Function3"); + } + + [Theory] + [InlineData(0)] + [InlineData(1)] + [InlineData(2)] + [InlineData(100)] + public void ItShouldAdvertiseFunctionsForTheFirstRequestOnly(int requestSequenceIndex) + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel, RequestSequenceIndex = requestSequenceIndex }); + + // Assert + Assert.NotNull(config); + + if (requestSequenceIndex == 0) + { + Assert.NotNull(config.Functions); + Assert.Equal(2, config.Functions.Count); + Assert.Contains(config.Functions, f => f.Name == "Function1"); + Assert.Contains(config.Functions, f => f.Name == "Function2"); + } + else + { + Assert.Null(config.Functions); + } + } + + [Fact] + public void ItShouldAllowAutoInvocationByDefault() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.True(config.AutoInvoke); + } + + [Fact] + public void ItShouldAllowManualInvocation() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(autoInvoke: false); + + var config = choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + + // Assert + Assert.NotNull(config); + Assert.False(config.AutoInvoke); + } + + [Fact] + public void ItShouldInitializeFunctionPropertyByFunctionsPassedViaConstructor() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + // Act + var choiceBehavior = new 
RequiredFunctionChoiceBehavior(functions: [plugin.ElementAt(0), plugin.ElementAt(1)]); + + // Assert + Assert.NotNull(choiceBehavior.Functions); + Assert.Equal(2, choiceBehavior.Functions.Count); + + Assert.Equal("MyPlugin.Function1", choiceBehavior.Functions.ElementAt(0)); + Assert.Equal("MyPlugin.Function2", choiceBehavior.Functions.ElementAt(1)); + } + + [Fact] + public void ItShouldThrowExceptionIfAutoInvocationRequestedButNoKernelIsProvided() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + var choiceBehavior = new RequiredFunctionChoiceBehavior(); + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = null }); + }); + + Assert.Equal("Auto-invocation is not supported when no kernel is provided.", exception.Message); + } + + [Fact] + public void ItShouldThrowExceptionIfAutoInvocationRequestedAndFunctionIsNotRegisteredInKernel() + { + // Arrange + var plugin = GetTestPlugin(); + + var choiceBehavior = new RequiredFunctionChoiceBehavior(functions: [plugin.ElementAt(0)]); + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + }); + + Assert.Equal("The specified function MyPlugin.Function1 is not available in the kernel.", exception.Message); + } + + [Fact] + public void ItShouldThrowExceptionIfNoFunctionFoundAndManualInvocationIsRequested() + { + // Arrange + var plugin = GetTestPlugin(); + this._kernel.Plugins.Add(plugin); + + var choiceBehavior = new RequiredFunctionChoiceBehavior(autoInvoke: false) + { + Functions = ["MyPlugin.NonKernelFunction"] + }; + + // Act + var exception = Assert.Throws(() => + { + choiceBehavior.GetConfiguration(new(chatHistory: []) { Kernel = this._kernel }); + }); + + Assert.Equal("The specified function MyPlugin.NonKernelFunction was not found.", exception.Message); + } + + [Fact] + public void ItShouldPropagateOptionsToConfiguration() + { + // 
Arrange + var options = new FunctionChoiceBehaviorOptions(); + + // Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(autoInvoke: false, options: options); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.Same(options, configuration.Options); + } + + [Fact] + public void ItShouldPropagateDefaultOptionsIfNoneAreProvided() + { + // Arrange & Act + var choiceBehavior = new RequiredFunctionChoiceBehavior(autoInvoke: false, options: null); + + // Assert + var configuration = choiceBehavior.GetConfiguration(new FunctionChoiceBehaviorConfigurationContext(chatHistory: [])); + + Assert.NotNull(configuration.Options); + } + + private static KernelPlugin GetTestPlugin() + { + var function1 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function2"); + var function3 = KernelFunctionFactory.CreateFromMethod(() => { }, "Function3"); + + return KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2, function3]); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs index dd822a091175..c8abbace96d2 100644 --- a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs @@ -20,7 +20,11 @@ public void PromptExecutionSettingsCloneWorksAsExpected() "temperature": 0.5, "top_p": 0.0, "presence_penalty": 0.0, - "frequency_penalty": 0.0 + "frequency_penalty": 0.0, + "function_choice_behavior": { + "type": "auto", + "functions": ["p1.f1"] + } } """; var executionSettings = JsonSerializer.Deserialize(configPayload); @@ -32,6 +36,7 @@ public void PromptExecutionSettingsCloneWorksAsExpected() Assert.NotNull(clone); Assert.Equal(executionSettings.ModelId, clone.ModelId); 
Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData); + Assert.Equivalent(executionSettings.FunctionChoiceBehavior, clone.FunctionChoiceBehavior); Assert.Equal(executionSettings.ServiceId, clone.ServiceId); } @@ -88,6 +93,7 @@ public void PromptExecutionSettingsFreezeWorksAsExpected() Assert.NotNull(executionSettings.ExtensionData); Assert.Throws(() => executionSettings.ExtensionData.Add("results_per_prompt", 2)); Assert.Throws(() => executionSettings.ExtensionData["temperature"] = 1); + Assert.Throws(() => executionSettings.FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()); executionSettings!.Freeze(); // idempotent Assert.True(executionSettings.IsFrozen); diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs index 167811b1b2e7..524caed4ff29 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/AnnotationContentTests.cs @@ -19,7 +19,7 @@ public void VerifyAnnotationContentInitialState() { AnnotationContent definition = new(); - Assert.Null(definition.Quote); + Assert.Empty(definition.Quote); Assert.Equal(0, definition.StartIndex); Assert.Equal(0, definition.EndIndex); Assert.Null(definition.FileId); diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs index 759fec2b532b..cd753a15e201 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs @@ -202,7 +202,7 @@ public void ItCanBeSerializeAndDeserialized() new FunctionCallContent("function-name", "plugin-name", "function-id", new KernelArguments { ["parameter"] = "argument" }), new FunctionResultContent(new FunctionCallContent("function-name", "plugin-name", "function-id"), "function-result"), new 
FileReferenceContent(fileId: "file-id-1") { ModelId = "model-7", Metadata = new Dictionary() { ["metadata-key-7"] = "metadata-value-7" } }, - new AnnotationContent() { ModelId = "model-8", FileId = "file-id-2", StartIndex = 2, EndIndex = 24, Quote = "quote-8", Metadata = new Dictionary() { ["metadata-key-8"] = "metadata-value-8" } } + new AnnotationContent("quote-8") { ModelId = "model-8", FileId = "file-id-2", StartIndex = 2, EndIndex = 24, Metadata = new Dictionary() { ["metadata-key-8"] = "metadata-value-8" } }, ]; // Act @@ -320,4 +320,92 @@ public void ItCanBeSerializeAndDeserialized() Assert.Single(annotationContent.Metadata); Assert.Equal("metadata-value-8", annotationContent.Metadata["metadata-key-8"]?.ToString()); } + + [Fact] + public void ItCanBePolymorphicallySerializedAndDeserializedAsKernelContentType() + { + // Arrange + KernelContent sut = new ChatMessageContent(AuthorRole.User, "test-content", "test-model", metadata: new Dictionary() + { + ["test-metadata-key"] = "test-metadata-value" + }) + { + MimeType = "test-mime-type" + }; + + // Act + var json = JsonSerializer.Serialize(sut); + + var deserialized = JsonSerializer.Deserialize(json)!; + + // Assert + Assert.IsType(deserialized); + Assert.Equal("test-content", ((ChatMessageContent)deserialized).Content); + Assert.Equal("test-model", deserialized.ModelId); + Assert.Equal("test-mime-type", deserialized.MimeType); + Assert.NotNull(deserialized.Metadata); + Assert.Single(deserialized.Metadata); + Assert.Equal("test-metadata-value", deserialized.Metadata["test-metadata-key"]?.ToString()); + } + + [Fact] + public void UnknownDerivativeCanBePolymorphicallySerializedAndDeserializedAsChatMessageContentType() + { + // Arrange + KernelContent sut = new UnknownExternalChatMessageContent(AuthorRole.User, "test-content") + { + MimeType = "test-mime-type", + }; + + // Act + var json = JsonSerializer.Serialize(sut); + + var deserialized = JsonSerializer.Deserialize(json)!; + + // Assert + 
Assert.IsType(deserialized); + Assert.Equal("test-content", ((ChatMessageContent)deserialized).Content); + Assert.Equal("test-mime-type", deserialized.MimeType); + } + + [Fact] + public void ItCanBeSerializeAndDeserializedWithFunctionResultOfChatMessageType() + { + // Arrange + ChatMessageContentItemCollection items = [ + new FunctionResultContent(new FunctionCallContent("function-name-1", "plugin-name-1", "function-id-1"), new ChatMessageContent(AuthorRole.User, "test-content-1")), + new FunctionResultContent(new FunctionCallContent("function-name-2", "plugin-name-2", "function-id-2"), new UnknownExternalChatMessageContent(AuthorRole.Assistant, "test-content-2")), + ]; + + // Act + var chatMessageJson = JsonSerializer.Serialize(new ChatMessageContent(AuthorRole.User, items: items, "message-model")); + + var deserializedMessage = JsonSerializer.Deserialize(chatMessageJson)!; + + // Assert + var functionResultContentWithResultOfChatMessageContentType = deserializedMessage.Items[0] as FunctionResultContent; + Assert.NotNull(functionResultContentWithResultOfChatMessageContentType); + Assert.Equal("function-name-1", functionResultContentWithResultOfChatMessageContentType.FunctionName); + Assert.Equal("function-id-1", functionResultContentWithResultOfChatMessageContentType.CallId); + Assert.Equal("plugin-name-1", functionResultContentWithResultOfChatMessageContentType.PluginName); + var chatMessageContent = Assert.IsType(functionResultContentWithResultOfChatMessageContentType.Result); + Assert.Equal("user", chatMessageContent.GetProperty("Role").GetProperty("Label").GetString()); + Assert.Equal("test-content-1", chatMessageContent.GetProperty("Items")[0].GetProperty("Text").GetString()); + + var functionResultContentWithResultOfUnknownChatMessageContentType = deserializedMessage.Items[1] as FunctionResultContent; + Assert.NotNull(functionResultContentWithResultOfUnknownChatMessageContentType); + Assert.Equal("function-name-2", 
functionResultContentWithResultOfUnknownChatMessageContentType.FunctionName); + Assert.Equal("function-id-2", functionResultContentWithResultOfUnknownChatMessageContentType.CallId); + Assert.Equal("plugin-name-2", functionResultContentWithResultOfUnknownChatMessageContentType.PluginName); + var unknownChatMessageContent = Assert.IsType(functionResultContentWithResultOfUnknownChatMessageContentType.Result); + Assert.Equal("assistant", unknownChatMessageContent.GetProperty("Role").GetProperty("Label").GetString()); + Assert.Equal("test-content-2", unknownChatMessageContent.GetProperty("Items")[0].GetProperty("Text").GetString()); + } + + private sealed class UnknownExternalChatMessageContent : ChatMessageContent + { + public UnknownExternalChatMessageContent(AuthorRole role, string? content) : base(role, content) + { + } + } } diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentTests.cs index 8ceac9ab6bcb..6b68b59faa6d 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentTests.cs @@ -70,11 +70,10 @@ public async Task ItShouldHandleFunctionCallRequestExceptionAsync() }; // Act - var resultContent = await sut.InvokeAsync(kernel); + var exception = await Assert.ThrowsAsync(() => sut.InvokeAsync(kernel)); // Assert - Assert.NotNull(resultContent); - Assert.Equal("Error: Function call arguments were invalid JSON.", resultContent.Result); + Assert.Equal("Error: Function call arguments were invalid JSON.", exception.Message); } [Fact] diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs new file mode 100644 index 000000000000..eb954752ce4b --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingAnnotationContentTests.cs @@ -0,0 +1,71 @@ 
+๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Text; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.UnitTests.Contents; + +#pragma warning disable SKEXP0110 + +/// +/// Unit testing of . +/// +public class StreamingAnnotationContentTests +{ + /// + /// Verify default state. + /// + [Fact] + public void VerifyStreamingAnnotationContentInitialState() + { + StreamingAnnotationContent definition = new(); + + Assert.Empty(definition.Quote); + Assert.Equal(0, definition.StartIndex); + Assert.Equal(0, definition.EndIndex); + Assert.Null(definition.FileId); + } + + /// + /// Verify usage. + /// + [Fact] + public void VerifyStreamingAnnotationContentWithFileId() + { + StreamingAnnotationContent definition = + new("test quote") + { + StartIndex = 33, + EndIndex = 49, + FileId = "#id", + }; + + Assert.Equal("test quote", definition.Quote); + Assert.Equal(33, definition.StartIndex); + Assert.Equal(49, definition.EndIndex); + Assert.Equal("#id", definition.FileId); + Assert.Equal("test quote: #id", definition.ToString()); + Assert.Equal("test quote: #id", Encoding.UTF8.GetString(definition.ToByteArray())); + } + + /// + /// Verify usage. 
+ /// + [Fact] + public void VerifyStreamingAnnotationContentWithoutFileId() + { + StreamingAnnotationContent definition = + new("test quote") + { + StartIndex = 33, + EndIndex = 49, + }; + + Assert.Equal("test quote", definition.Quote); + Assert.Equal(33, definition.StartIndex); + Assert.Equal(49, definition.EndIndex); + Assert.Null(definition.FileId); + Assert.Equal("test quote", definition.ToString()); + Assert.Equal("test quote", Encoding.UTF8.GetString(definition.ToByteArray())); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingFileReferenceContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingFileReferenceContentTests.cs new file mode 100644 index 000000000000..a5105bf48ae5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingFileReferenceContentTests.cs @@ -0,0 +1,37 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. +using System.Text; +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Contents; + +#pragma warning disable SKEXP0110 + +/// +/// Unit testing of . +/// +public class StreamingFileReferenceContentTests +{ + /// + /// Verify default state. + /// + [Fact] + public void VerifyStreamingFileReferenceContentInitialState() + { + StreamingFileReferenceContent definition = new(); + + Assert.Empty(definition.FileId); + } + /// + /// Verify usage. 
+ /// + [Fact] + public void VerifyStreamingFileReferenceContentUsage() + { + StreamingFileReferenceContent definition = new(fileId: "testfile"); + + Assert.Equal("testfile", definition.FileId); + Assert.Equal("testfile", definition.ToString()); + Assert.Equal("testfile", Encoding.UTF8.GetString(definition.ToByteArray())); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs new file mode 100644 index 000000000000..903bc8c4341a --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordMappingTests.cs @@ -0,0 +1,169 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +public class VectorStoreRecordMappingTests +{ + [Fact] + public void BuildPropertiesInfoWithValuesShouldBuildPropertiesInfo() + { + // Arrange. + var dataModelPropertiesInfo = new[] + { + typeof(DataModel).GetProperty(nameof(DataModel.Key))!, + typeof(DataModel).GetProperty(nameof(DataModel.Data))! + }; + var dataModelToStorageNameMapping = new Dictionary + { + { nameof(DataModel.Key), "key" }, + { nameof(DataModel.Data), "data" }, + }; + var storageValues = new Dictionary + { + { "key", "key value" }, + { "data", "data value" }, + }; + + // Act. + var propertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( + dataModelPropertiesInfo, + dataModelToStorageNameMapping, + storageValues); + + // Assert. 
+ var propertiesInfoWithValuesArray = propertiesInfoWithValues.ToArray(); + Assert.Equal(2, propertiesInfoWithValuesArray.Length); + Assert.Equal(dataModelPropertiesInfo[0], propertiesInfoWithValuesArray[0].Key); + Assert.Equal("key value", propertiesInfoWithValuesArray[0].Value); + Assert.Equal(dataModelPropertiesInfo[1], propertiesInfoWithValuesArray[1].Key); + Assert.Equal("data value", propertiesInfoWithValuesArray[1].Value); + } + + [Fact] + public void BuildPropertiesInfoWithValuesShouldUseValueMapperIfProvided() + { + // Arrange. + var dataModelPropertiesInfo = new[] + { + typeof(DataModel).GetProperty(nameof(DataModel.Key))!, + typeof(DataModel).GetProperty(nameof(DataModel.Data))! + }; + var dataModelToStorageNameMapping = new Dictionary + { + { nameof(DataModel.Key), "key" }, + { nameof(DataModel.Data), "data" }, + }; + var storageValues = new Dictionary + { + { "key", 10 }, + { "data", 20 }, + }; + + // Act. + var propertiesInfoWithValues = VectorStoreRecordMapping.BuildPropertiesInfoWithValues( + dataModelPropertiesInfo, + dataModelToStorageNameMapping, + storageValues, + (int value, Type type) => value.ToString()); + + // Assert. + var propertiesInfoWithValuesArray = propertiesInfoWithValues.ToArray(); + Assert.Equal(2, propertiesInfoWithValuesArray.Length); + Assert.Equal(dataModelPropertiesInfo[0], propertiesInfoWithValuesArray[0].Key); + Assert.Equal("10", propertiesInfoWithValuesArray[0].Value); + Assert.Equal(dataModelPropertiesInfo[1], propertiesInfoWithValuesArray[1].Key); + Assert.Equal("20", propertiesInfoWithValuesArray[1].Value); + } + + [Fact] + public void SetPropertiesOnRecordShouldSetProperties() + { + // Arrange. + var record = new DataModel(); + + // Act. + VectorStoreRecordMapping.SetPropertiesOnRecord(record, new[] + { + new KeyValuePair(typeof(DataModel).GetProperty(nameof(DataModel.Key))!, "key value"), + new KeyValuePair(typeof(DataModel).GetProperty(nameof(DataModel.Data))!, "data value"), + }); + + // Assert. 
+ Assert.Equal("key value", record.Key); + Assert.Equal("data value", record.Data); + } + + [Theory] + [InlineData(typeof(List))] + [InlineData(typeof(ICollection))] + [InlineData(typeof(IEnumerable))] + [InlineData(typeof(IList))] + [InlineData(typeof(IReadOnlyCollection))] + [InlineData(typeof(IReadOnlyList))] + [InlineData(typeof(string[]))] + [InlineData(typeof(IEnumerable))] + [InlineData(typeof(ArrayList))] + public void CreateEnumerableCanCreateEnumerablesOfAllRequiredTypes(Type expectedType) + { + // Arrange. + IEnumerable input = new List { "one", "two", "three", "four" }; + + // Act. + var actual = VectorStoreRecordMapping.CreateEnumerable(input, expectedType); + + // Assert. + Assert.True(expectedType.IsAssignableFrom(actual!.GetType())); + } + + [Theory] + [InlineData(typeof(List))] + [InlineData(typeof(ICollection))] + [InlineData(typeof(IEnumerable))] + [InlineData(typeof(IList))] + [InlineData(typeof(IReadOnlyCollection))] + [InlineData(typeof(IReadOnlyList))] + [InlineData(typeof(string[]))] + [InlineData(typeof(IEnumerable))] + [InlineData(typeof(ArrayList))] + public void CreateEnumerableCanCreateEnumerablesOfAllRequiredTypesUsingObjectEnumerable(Type expectedType) + { + // Arrange. + IEnumerable input = new List { "one", "two", "three", "four" }; + + // Act. + var actual = VectorStoreRecordMapping.CreateEnumerable(input, expectedType); + + // Assert. + Assert.True(expectedType.IsAssignableFrom(actual!.GetType())); + } + + [Theory] + [InlineData(typeof(string))] + [InlineData(typeof(HashSet))] + [InlineData(typeof(ISet))] + [InlineData(typeof(Dictionary))] + [InlineData(typeof(Stack))] + [InlineData(typeof(Queue))] + public void CreateEnumerableThrowsForUnsupportedType(Type expectedType) + { + // Arrange. + IEnumerable input = new List { "one", "two", "three", "four" }; + + // Act & Assert. 
+ Assert.Throws(() => VectorStoreRecordMapping.CreateEnumerable(input, expectedType)); + } + + private sealed class DataModel + { + public string Key { get; set; } = string.Empty; + public string Data { get; set; } = string.Empty; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs index cfddd8437425..d7128f92fa1f 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyReaderTests.cs @@ -2,8 +2,6 @@ using System; using System.Collections.Generic; -using System.Linq; -using System.Reflection; using System.Text.Json; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.Data; @@ -13,326 +11,469 @@ namespace SemanticKernel.UnitTests.Data; public class VectorStoreRecordPropertyReaderTests { - [Fact] - public void SplitDefinitionsAndVerifyReturnsProperties() + [Theory] + [MemberData(nameof(NoKeyTypeAndDefinitionCombos))] + public void ConstructorFailsForNoKey(Type type, VectorStoreRecordDefinition? definition) + { + // Act & Assert. + var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, null)); + Assert.Equal("No key property found on type NoKeyModel or the provided VectorStoreRecordDefinition.", exception.Message); + } + + [Theory] + [MemberData(nameof(MultiKeysTypeAndDefinitionCombos))] + public void ConstructorSucceedsForSupportedMultiKeys(Type type, VectorStoreRecordDefinition? definition) + { + // Act & Assert. + var sut = new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleKeys = true }); + } + + [Theory] + [MemberData(nameof(MultiKeysTypeAndDefinitionCombos))] + public void ConstructorFailsForUnsupportedMultiKeys(Type type, VectorStoreRecordDefinition? definition) + { + // Act & Assert. 
+ var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleKeys = false })); + Assert.Equal("Multiple key properties found on type MultiKeysModel or the provided VectorStoreRecordDefinition.", exception.Message); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void ConstructorSucceedsForSupportedMultiVectors(Type type, VectorStoreRecordDefinition? definition) + { + // Act & Assert. + var sut = new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleVectors = true }); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void ConstructorFailsForUnsupportedMultiVectors(Type type, VectorStoreRecordDefinition? definition) { + // Act & Assert. + var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { SupportsMultipleVectors = false })); + Assert.Equal("Multiple vector properties found on type MultiPropsModel or the provided VectorStoreRecordDefinition while only one is supported.", exception.Message); + } + + [Theory] + [MemberData(nameof(NoVectorsTypeAndDefinitionCombos))] + public void ConstructorFailsForUnsupportedNoVectors(Type type, VectorStoreRecordDefinition? definition) + { + // Act & Assert. + var exception = Assert.Throws(() => new VectorStoreRecordPropertyReader(type, definition, new VectorStoreRecordPropertyReaderOptions { RequiresAtLeastOneVector = true })); + Assert.Equal("No vector property found on type NoVectorModel or the provided VectorStoreRecordDefinition while at least one is required.", exception.Message); + } + + [Theory] + [MemberData(nameof(TypeAndDefinitionCombos))] + public void CanGetDefinition(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. 
- var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify("testType", this._multiPropsDefinition, true, true); + var actual = sut.RecordDefinition; // Assert. - Assert.Equal("Key", properties.KeyProperty.DataModelPropertyName); - Assert.Equal(2, properties.DataProperties.Count); - Assert.Equal(2, properties.VectorProperties.Count); - Assert.Equal("Data1", properties.DataProperties[0].DataModelPropertyName); - Assert.Equal("Data2", properties.DataProperties[1].DataModelPropertyName); - Assert.Equal("Vector1", properties.VectorProperties[0].DataModelPropertyName); - Assert.Equal("Vector2", properties.VectorProperties[1].DataModelPropertyName); + Assert.NotNull(actual); } [Theory] - [InlineData(false, true, "MultiProps")] - [InlineData(true, true, "NoKey")] - [InlineData(true, true, "MultiKeys")] - [InlineData(false, true, "NoVector")] - [InlineData(true, true, "NoVector")] - public void SplitDefinitionsAndVerifyThrowsForInvalidModel(bool supportsMultipleVectors, bool requiresAtLeastOneVector, string definitionName) + [MemberData(nameof(TypeAndDefinitionCombos))] + public void CanGetKeyPropertyInfo(Type type, VectorStoreRecordDefinition? definition) { // Arrange. - var definition = definitionName switch - { - "MultiProps" => this._multiPropsDefinition, - "NoKey" => this._noKeyDefinition, - "MultiKeys" => this._multiKeysDefinition, - "NoVector" => this._noVectorDefinition, - _ => throw new ArgumentException("Invalid definition.") - }; + var sut = new VectorStoreRecordPropertyReader(type, definition, null); - // Act & Assert. - Assert.Throws(() => VectorStoreRecordPropertyReader.SplitDefinitionAndVerify("testType", definition, supportsMultipleVectors, requiresAtLeastOneVector)); + // Act. + var actual = sut.KeyPropertyInfo; + + // Assert. 
+ Assert.NotNull(actual); + Assert.Equal("Key", actual.Name); + Assert.Equal(typeof(string), actual.PropertyType); } [Theory] - [InlineData(true, false)] - [InlineData(false, false)] - [InlineData(true, true)] - [InlineData(false, true)] - public void FindPropertiesCanFindAllPropertiesOnSinglePropsModel(bool supportsMultipleVectors, bool useConfig) + [MemberData(nameof(TypeAndDefinitionCombos))] + public void CanGetKeyPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var properties = useConfig ? - VectorStoreRecordPropertyReader.FindProperties(typeof(SinglePropsModel), this._singlePropsDefinition, supportsMultipleVectors) : - VectorStoreRecordPropertyReader.FindProperties(typeof(SinglePropsModel), supportsMultipleVectors); + var actual = sut.KeyPropertiesInfo; // Assert. - Assert.Equal("Key", properties.KeyProperty.Name); - Assert.Single(properties.DataProperties); - Assert.Single(properties.VectorProperties); - Assert.Equal("Data", properties.DataProperties[0].Name); - Assert.Equal("Vector", properties.VectorProperties[0].Name); + Assert.NotNull(actual); + Assert.Single(actual); + Assert.Equal("Key", actual[0].Name); + Assert.Equal(typeof(string), actual[0].PropertyType); } [Theory] - [InlineData(true)] - [InlineData(false)] - public void FindPropertiesCanFindAllPropertiesOnMultiPropsModel(bool useConfig) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetDataPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var properties = useConfig ? - VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), this._multiPropsDefinition, true) : - VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), true); + var actual = sut.DataPropertiesInfo; // Assert. 
- Assert.Equal("Key", properties.KeyProperty.Name); - Assert.Equal(2, properties.DataProperties.Count); - Assert.Equal(2, properties.VectorProperties.Count); - Assert.Equal("Data1", properties.DataProperties[0].Name); - Assert.Equal("Data2", properties.DataProperties[1].Name); - Assert.Equal("Vector1", properties.VectorProperties[0].Name); - Assert.Equal("Vector2", properties.VectorProperties[1].Name); + Assert.NotNull(actual); + Assert.Equal(2, actual.Count); + Assert.Equal("Data1", actual[0].Name); + Assert.Equal(typeof(string), actual[0].PropertyType); + Assert.Equal("Data2", actual[1].Name); + Assert.Equal(typeof(string), actual[1].PropertyType); } [Theory] - [InlineData(true)] - [InlineData(false)] - public void FindPropertiesThrowsForMultipleVectorsWithSingleVectorSupport(bool useConfig) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetVectorPropertiesInfo(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var ex = useConfig ? - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), this._multiPropsDefinition, false)) : - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), false)); + var actual = sut.VectorPropertiesInfo; // Assert. - var expectedMessage = useConfig ? - "Multiple vector properties configured for type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+MultiPropsModel while only one is supported." 
: - "Multiple vector properties found on type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+MultiPropsModel while only one is supported."; - Assert.Equal(expectedMessage, ex.Message); + Assert.NotNull(actual); + Assert.Equal(2, actual.Count); + Assert.Equal("Vector1", actual[0].Name); + Assert.Equal(typeof(ReadOnlyMemory), actual[0].PropertyType); + Assert.Equal("Vector2", actual[1].Name); + Assert.Equal(typeof(ReadOnlyMemory), actual[1].PropertyType); } [Theory] - [InlineData(true)] - [InlineData(false)] - public void FindPropertiesThrowsOnMultipleKeyProperties(bool useConfig) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetFirstVectorPropertyName(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var ex = useConfig ? - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(MultiKeysModel), this._multiKeysDefinition, true)) : - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(MultiKeysModel), true)); + var actual = sut.FirstVectorPropertyName; // Assert. - var expectedMessage = useConfig ? - "Multiple key properties configured for type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+MultiKeysModel." : - "Multiple key properties found on type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+MultiKeysModel."; - Assert.Equal(expectedMessage, ex.Message); + Assert.Equal("Vector1", actual); } [Theory] - [InlineData(true)] - [InlineData(false)] - public void FindPropertiesThrowsOnNoKeyProperty(bool useConfig) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetFirstVectorPropertyInfo(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var ex = useConfig ? 
- Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(NoKeyModel), this._noKeyDefinition, true)) : - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(NoKeyModel), true)); + var actual = sut.FirstVectorPropertyInfo; // Assert. - var expectedMessage = useConfig ? - "No key property configured for type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+NoKeyModel." : - "No key property found on type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+NoKeyModel."; - Assert.Equal(expectedMessage, ex.Message); + Assert.NotNull(actual); + Assert.Equal("Vector1", actual.Name); + Assert.Equal(typeof(ReadOnlyMemory), actual.PropertyType); } [Theory] - [InlineData(true)] - [InlineData(false)] - public void FindPropertiesThrowsOnNoVectorPropertyWithSingleVectorSupport(bool useConfig) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyName(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. - var ex = useConfig ? - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(NoVectorModel), this._noVectorDefinition, false)) : - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(NoVectorModel), false)); + var actual = sut.KeyPropertyName; // Assert. - var expectedMessage = useConfig ? - "No vector property configured for type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+NoVectorModel." 
: - "No vector property found on type SemanticKernel.UnitTests.Data.VectorStoreRecordPropertyReaderTests+NoVectorModel."; - Assert.Equal(expectedMessage, ex.Message); + Assert.Equal("Key", actual); } [Theory] - [InlineData("Key", "MissingKey")] - [InlineData("Data", "MissingData")] - [InlineData("Vector", "MissingVector")] - public void FindPropertiesUsingConfigThrowsForNotFoundProperties(string propertyType, string propertyName) + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyStoragePropertyNameWithoutOverride(Type type, VectorStoreRecordDefinition? definition) { - var missingKeyDefinition = new VectorStoreRecordDefinition { Properties = [new VectorStoreRecordKeyProperty(propertyName, typeof(string))] }; - var missingDataDefinition = new VectorStoreRecordDefinition { Properties = [new VectorStoreRecordDataProperty(propertyName, typeof(string))] }; - var missingVectorDefinition = new VectorStoreRecordDefinition { Properties = [new VectorStoreRecordVectorProperty(propertyName, typeof(ReadOnlyMemory))] }; + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var definition = propertyType switch - { - "Key" => missingKeyDefinition, - "Data" => missingDataDefinition, - "Vector" => missingVectorDefinition, - _ => throw new ArgumentException("Invalid property type.") - }; + // Act. + var actual = sut.KeyPropertyStoragePropertyName; - Assert.Throws(() => VectorStoreRecordPropertyReader.FindProperties(typeof(NoKeyModel), definition, false)); + // Assert. + Assert.Equal("Key", actual); } - [Fact] - public void CreateVectorStoreRecordDefinitionFromTypeConvertsAllProps() + [Theory] + [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyStoragePropertyNameWithOverride(Type type, VectorStoreRecordDefinition? definition) { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + // Act. 
- var definition = VectorStoreRecordPropertyReader.CreateVectorStoreRecordDefinitionFromType(typeof(MultiPropsModel), true); + var actual = sut.KeyPropertyStoragePropertyName; // Assert. - Assert.Equal(5, definition.Properties.Count); - Assert.Equal("Key", definition.Properties[0].DataModelPropertyName); - Assert.Equal("Data1", definition.Properties[1].DataModelPropertyName); - Assert.Equal("Data2", definition.Properties[2].DataModelPropertyName); - Assert.Equal("Vector1", definition.Properties[3].DataModelPropertyName); - Assert.Equal("Vector2", definition.Properties[4].DataModelPropertyName); + Assert.Equal("storage_key", actual); + } - Assert.IsType(definition.Properties[0]); - Assert.IsType(definition.Properties[1]); - Assert.IsType(definition.Properties[2]); - Assert.IsType(definition.Properties[3]); - Assert.IsType(definition.Properties[4]); + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetDataPropertyStoragePropertyNameWithOverrideMix(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var data1 = (VectorStoreRecordDataProperty)definition.Properties[1]; - var data2 = (VectorStoreRecordDataProperty)definition.Properties[2]; + // Act. + var actual = sut.DataPropertyStoragePropertyNames; - Assert.True(data1.IsFilterable); - Assert.False(data2.IsFilterable); + // Assert. + Assert.Equal("Data1", actual[0]); + Assert.Equal("storage_data2", actual[1]); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetVectorPropertyStoragePropertyNameWithOverrideMix(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act. + var actual = sut.VectorPropertyStoragePropertyNames; - Assert.True(data1.IsFullTextSearchable); - Assert.False(data2.IsFullTextSearchable); + // Assert. 
+ Assert.Equal("Vector1", actual[0]); + Assert.Equal("storage_vector2", actual[1]); + } - Assert.Equal(typeof(string), data1.PropertyType); - Assert.Equal(typeof(string), data2.PropertyType); + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyJsonNameWithoutOverride(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); - var vector1 = (VectorStoreRecordVectorProperty)definition.Properties[3]; + // Act. + var actual = sut.KeyPropertyJsonName; - Assert.Equal(4, vector1.Dimensions); + // Assert. + Assert.Equal("Key", actual); } - [Fact] - public void VerifyPropertyTypesPassForAllowedTypes() + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyJsonNameWithSerializerSettings(Type type, VectorStoreRecordDefinition? definition) { // Arrange. - var properties = VectorStoreRecordPropertyReader.FindProperties(typeof(SinglePropsModel), true); + var sut = new VectorStoreRecordPropertyReader(type, definition, new() + { + JsonSerializerOptions = new JsonSerializerOptions() + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseUpper + } + }); // Act. - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.DataProperties, [typeof(string)], "Data"); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(string)], "Data"); + var actual = sut.KeyPropertyJsonName; + + // Assert. + Assert.Equal("KEY", actual); } - [Fact] - public void VerifyPropertyTypesPassForAllowedEnumerableTypes() + [Theory] + [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] + public void CanGetKeyPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) { // Arrange. 
- var properties = VectorStoreRecordPropertyReader.FindProperties(typeof(EnumerablePropsModel), true); + var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act. - VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.DataProperties, [typeof(string)], "Data", supportEnumerable: true); - VectorStoreRecordPropertyReader.VerifyPropertyTypes(this._enumerablePropsDefinition.Properties.OfType(), [typeof(string)], "Data", supportEnumerable: true); + var actual = sut.KeyPropertyJsonName; + + // Assert. + Assert.Equal("json_key", actual); } - [Fact] - public void VerifyPropertyTypesFailsForDisallowedTypes() + [Theory] + [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] + public void CanGetDataPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) { // Arrange. - var properties = VectorStoreRecordPropertyReader.FindProperties(typeof(SinglePropsModel), true); + var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act. - var ex1 = Assert.Throws(() => VectorStoreRecordPropertyReader.VerifyPropertyTypes(properties.DataProperties, [typeof(int), typeof(float)], "Data")); - var ex2 = Assert.Throws(() => VectorStoreRecordPropertyReader.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(int), typeof(float)], "Data")); + var actual = sut.DataPropertyJsonNames; // Assert. - Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. Type of the property 'Data' is System.String.", ex1.Message); - Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. 
Type of the property 'Data' is System.String.", ex2.Message); + Assert.NotNull(actual); + Assert.Equal(2, actual.Count); + Assert.Equal("json_data1", actual[0]); + Assert.Equal("json_data2", actual[1]); } - [Fact] - public void VerifyStoragePropertyNameMapChecksStorageNameAndFallsBackToPropertyName() + [Theory] + [MemberData(nameof(StorageNamesPropsTypeAndDefinitionCombos))] + public void CanGetVectorPropertyJsonNameWithOverride(Type type, VectorStoreRecordDefinition? definition) { // Arrange. - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify("testType", this._multiPropsDefinition, true, true); + var sut = new VectorStoreRecordPropertyReader(type, definition, null); // Act. - var storageNameMap = VectorStoreRecordPropertyReader.BuildPropertyNameToStorageNameMap(properties); + var actual = sut.VectorPropertyJsonNames; // Assert. - Assert.Equal(5, storageNameMap.Count); + Assert.NotNull(actual); + Assert.Single(actual); + Assert.Equal("json_vector", actual[0]); + } - // From Property Names. - Assert.Equal("Key", storageNameMap["Key"]); - Assert.Equal("Data1", storageNameMap["Data1"]); - Assert.Equal("Vector1", storageNameMap["Vector1"]); - Assert.Equal("Vector2", storageNameMap["Vector2"]); + [Theory] + [MemberData(nameof(TypeAndDefinitionCombos))] + public void VerifyKeyPropertiesPassesForAllowedTypes(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(string), typeof(int) }; - // From storage property name on vector store record data property. - Assert.Equal("data_2", storageNameMap["Data2"]); + // Act. + sut.VerifyKeyProperties(allowedTypes); } - [Fact] - public void VerifyGetJsonPropertyNameChecksJsonOptionsAndJsonAttributesAndFallsBackToPropertyName() + [Theory] + [MemberData(nameof(TypeAndDefinitionCombos))] + public void VerifyKeyPropertiesFailsForDisallowedTypes(Type type, VectorStoreRecordDefinition? 
definition) { // Arrange. - var options = new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseUpper }; - var properties = VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), true); - var allProperties = (new PropertyInfo[] { properties.KeyProperty }) - .Concat(properties.DataProperties) - .Concat(properties.VectorProperties); + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(long) }; // Act. - var jsonNameMap = allProperties - .Select(p => new { PropertyName = p.Name, JsonName = VectorStoreRecordPropertyReader.GetJsonPropertyName(options, p) }) - .ToDictionary(p => p.PropertyName, p => p.JsonName); + var exception = Assert.Throws(() => sut.VerifyKeyProperties(allowedTypes)); + Assert.Equal("Key properties must be one of the supported types: System.Int64. Type of the property 'Key' is System.String.", exception.Message); + } - // Assert. - Assert.Equal(5, jsonNameMap.Count); + [Theory] + [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] + public void VerifyDataPropertiesPassesForAllowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(string), typeof(int) }; + + // Act. + sut.VerifyDataProperties(allowedTypes, true); + } - // From JsonNamingPolicy. - Assert.Equal("KEY", jsonNameMap["Key"]); - Assert.Equal("DATA1", jsonNameMap["Data1"]); - Assert.Equal("DATA2", jsonNameMap["Data2"]); - Assert.Equal("VECTOR1", jsonNameMap["Vector1"]); + [Theory] + [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] + public void VerifyDataPropertiesFailsForDisallowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. 
+ var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(string), typeof(int) }; - // From JsonPropertyName attribute. - Assert.Equal("vector-2", jsonNameMap["Vector2"]); + // Act. + var exception = Assert.Throws(() => sut.VerifyDataProperties(allowedTypes, false)); + Assert.Equal("Data properties must be one of the supported types: System.String, System.Int32. Type of the property 'EnumerableData' is System.Collections.Generic.IEnumerable`1[[System.String, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]].", exception.Message); } - [Fact] - public void VerifyBuildPropertyNameToJsonPropertyNameMapChecksJsonAttributesAndJsonOptionsAndFallsbackToPropertyNames() + [Theory] + [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] + public void VerifyVectorPropertiesPassesForAllowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) { // Arrange. - var options = new JsonSerializerOptions() { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseUpper }; - var properties = VectorStoreRecordPropertyReader.SplitDefinitionAndVerify("testType", this._multiPropsDefinition, true, true); - var propertiesInfo = VectorStoreRecordPropertyReader.FindProperties(typeof(MultiPropsModel), true); + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(ReadOnlyMemory) }; // Act. 
- var jsonNameMap1 = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(properties, typeof(MultiPropsModel), options); - var jsonNameMap2 = VectorStoreRecordPropertyReader.BuildPropertyNameToJsonPropertyNameMap(propertiesInfo, typeof(MultiPropsModel), options); + sut.VerifyVectorProperties(allowedTypes); + } - void assertJsonNameMap(Dictionary jsonNameMap) - { - Assert.Equal(5, jsonNameMap.Count); + [Theory] + [MemberData(nameof(EnumerablePropsTypeAndDefinitionCombos))] + public void VerifyVectorPropertiesFailsForDisallowedEnumerableTypes(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + var allowedTypes = new HashSet { typeof(ReadOnlyMemory) }; - // From JsonNamingPolicy. - Assert.Equal("KEY", jsonNameMap["Key"]); - Assert.Equal("DATA1", jsonNameMap["Data1"]); - Assert.Equal("DATA2", jsonNameMap["Data2"]); - Assert.Equal("VECTOR1", jsonNameMap["Vector1"]); + // Act. + var exception = Assert.Throws(() => sut.VerifyVectorProperties(allowedTypes)); + Assert.Equal("Vector properties must be one of the supported types: System.ReadOnlyMemory`1[[System.Double, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]]. Type of the property 'Vector' is System.ReadOnlyMemory`1[[System.Single, System.Private.CoreLib, Version=8.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e]].", exception.Message); + } - // From JsonPropertyName attribute. - Assert.Equal("vector-2", jsonNameMap["Vector2"]); - }; + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void GetStoragePropertyNameReturnsStorageNameWithFallback(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); - // Assert. - assertJsonNameMap(jsonNameMap1); - assertJsonNameMap(jsonNameMap2); + // Act & Assert. 
+ Assert.Equal("Data1", sut.GetStoragePropertyName("Data1")); + Assert.Equal("storage_data2", sut.GetStoragePropertyName("Data2")); + } + + [Theory] + [MemberData(nameof(MultiPropsTypeAndDefinitionCombos))] + public void GetJsonPropertyNameReturnsJsonWithFallback(Type type, VectorStoreRecordDefinition? definition) + { + // Arrange. + var sut = new VectorStoreRecordPropertyReader(type, definition, null); + + // Act & Assert. + Assert.Equal("Data1", sut.GetJsonPropertyName("Data1")); + Assert.Equal("json_data2", sut.GetJsonPropertyName("Data2")); + } + + public static IEnumerable NoKeyTypeAndDefinitionCombos() + { + yield return new object?[] { typeof(NoKeyModel), s_noKeyDefinition }; + yield return new object?[] { typeof(NoKeyModel), null }; + } + + public static IEnumerable NoVectorsTypeAndDefinitionCombos() + { + yield return new object?[] { typeof(NoVectorModel), s_noVectorDefinition }; + yield return new object?[] { typeof(NoVectorModel), null }; + } + + public static IEnumerable MultiKeysTypeAndDefinitionCombos() + { + yield return new object?[] { typeof(MultiKeysModel), s_multiKeysDefinition }; + yield return new object?[] { typeof(MultiKeysModel), null }; + } + + public static IEnumerable TypeAndDefinitionCombos() + { + yield return new object?[] { typeof(SinglePropsModel), s_singlePropsDefinition }; + yield return new object?[] { typeof(SinglePropsModel), null }; + yield return new object?[] { typeof(MultiPropsModel), s_multiPropsDefinition }; + yield return new object?[] { typeof(MultiPropsModel), null }; + yield return new object?[] { typeof(EnumerablePropsModel), s_enumerablePropsDefinition }; + yield return new object?[] { typeof(EnumerablePropsModel), null }; + } + + public static IEnumerable MultiPropsTypeAndDefinitionCombos() + { + yield return new object?[] { typeof(MultiPropsModel), s_multiPropsDefinition }; + yield return new object?[] { typeof(MultiPropsModel), null }; + } + + public static IEnumerable StorageNamesPropsTypeAndDefinitionCombos() + 
{ + yield return new object?[] { typeof(StorageNamesPropsModel), s_storageNamesPropsDefinition }; + yield return new object?[] { typeof(StorageNamesPropsModel), null }; + } + + public static IEnumerable EnumerablePropsTypeAndDefinitionCombos() + { + yield return new object?[] { typeof(EnumerablePropsModel), s_enumerablePropsDefinition }; + yield return new object?[] { typeof(EnumerablePropsModel), null }; } #pragma warning disable CA1812 // Invalid unused classes error, since I am using these for testing purposes above. @@ -341,7 +482,7 @@ private sealed class NoKeyModel { } - private readonly VectorStoreRecordDefinition _noKeyDefinition = new(); + private static readonly VectorStoreRecordDefinition s_noKeyDefinition = new(); private sealed class NoVectorModel { @@ -349,7 +490,7 @@ private sealed class NoVectorModel public string Key { get; set; } = string.Empty; } - private readonly VectorStoreRecordDefinition _noVectorDefinition = new() + private static readonly VectorStoreRecordDefinition s_noVectorDefinition = new() { Properties = [ @@ -366,7 +507,7 @@ private sealed class MultiKeysModel public string Key2 { get; set; } = string.Empty; } - private readonly VectorStoreRecordDefinition _multiKeysDefinition = new() + private static readonly VectorStoreRecordDefinition s_multiKeysDefinition = new() { Properties = [ @@ -389,7 +530,7 @@ private sealed class SinglePropsModel public string NotAnnotated { get; set; } = string.Empty; } - private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() + private static readonly VectorStoreRecordDefinition s_singlePropsDefinition = new() { Properties = [ @@ -407,28 +548,29 @@ private sealed class MultiPropsModel [VectorStoreRecordData(IsFilterable = true, IsFullTextSearchable = true)] public string Data1 { get; set; } = string.Empty; - [VectorStoreRecordData] + [VectorStoreRecordData(StoragePropertyName = "storage_data2")] + [JsonPropertyName("json_data2")] public string Data2 { get; set; } = string.Empty; 
[VectorStoreRecordVector(4, IndexKind.Flat, DistanceFunction.DotProductSimilarity)] public ReadOnlyMemory Vector1 { get; set; } - [VectorStoreRecordVector] - [JsonPropertyName("vector-2")] + [VectorStoreRecordVector(StoragePropertyName = "storage_vector2")] + [JsonPropertyName("json_vector2")] public ReadOnlyMemory Vector2 { get; set; } public string NotAnnotated { get; set; } = string.Empty; } - private readonly VectorStoreRecordDefinition _multiPropsDefinition = new() + private static readonly VectorStoreRecordDefinition s_multiPropsDefinition = new() { Properties = [ new VectorStoreRecordKeyProperty("Key", typeof(string)), new VectorStoreRecordDataProperty("Data1", typeof(string)) { IsFilterable = true, IsFullTextSearchable = true }, - new VectorStoreRecordDataProperty("Data2", typeof(string)) { StoragePropertyName = "data_2" }, + new VectorStoreRecordDataProperty("Data2", typeof(string)) { StoragePropertyName = "storage_data2" }, new VectorStoreRecordVectorProperty("Vector1", typeof(ReadOnlyMemory)) { Dimensions = 4, IndexKind = IndexKind.Flat, DistanceFunction = DistanceFunction.DotProductSimilarity }, - new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) + new VectorStoreRecordVectorProperty("Vector2", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector2" } ] }; @@ -452,7 +594,7 @@ private sealed class EnumerablePropsModel public string NotAnnotated { get; set; } = string.Empty; } - private readonly VectorStoreRecordDefinition _enumerablePropsDefinition = new() + private static readonly VectorStoreRecordDefinition s_enumerablePropsDefinition = new() { Properties = [ @@ -464,5 +606,35 @@ private sealed class EnumerablePropsModel ] }; -#pragma warning restore CA1812 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. 
+ private sealed class StorageNamesPropsModel + { + [VectorStoreRecordKey(StoragePropertyName = "storage_key")] + [JsonPropertyName("json_key")] + public string Key { get; set; } = string.Empty; + + [VectorStoreRecordData(StoragePropertyName = "storage_data1")] + [JsonPropertyName("json_data1")] + public string Data1 { get; set; } = string.Empty; + + [VectorStoreRecordData(StoragePropertyName = "storage_data2")] + [JsonPropertyName("json_data2")] + public string Data2 { get; set; } = string.Empty; + + [VectorStoreRecordVector(StoragePropertyName = "storage_vector")] + [JsonPropertyName("json_vector")] + public ReadOnlyMemory Vector { get; set; } + } + + private static readonly VectorStoreRecordDefinition s_storageNamesPropsDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)) { StoragePropertyName = "storage_key" }, + new VectorStoreRecordDataProperty("Data1", typeof(string)) { StoragePropertyName = "storage_data1" }, + new VectorStoreRecordDataProperty("Data2", typeof(string)) { StoragePropertyName = "storage_data2" }, + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) { StoragePropertyName = "storage_vector" } + ] + }; + +#pragma warning restore CA1812 } diff --git a/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs new file mode 100644 index 000000000000..8a0e7ed65507 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Data/VectorStoreRecordPropertyVerificationTests.cs @@ -0,0 +1,170 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Microsoft.SemanticKernel.Data; +using Xunit; + +namespace SemanticKernel.UnitTests.Data; + +public class VectorStoreRecordPropertyVerificationTests +{ + [Fact] + public void VerifyPropertyTypesPassForAllowedTypes() + { + // Arrange. + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), null, null); + + // Act. + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(string)], "Data"); + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(string)], "Data"); + } + + [Fact] + public void VerifyPropertyTypesPassForAllowedEnumerableTypes() + { + // Arrange. + var reader = new VectorStoreRecordPropertyReader(typeof(EnumerablePropsModel), null, null); + + // Act. + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(string)], "Data", supportEnumerable: true); + VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._enumerablePropsDefinition.Properties.OfType(), [typeof(string)], "Data", supportEnumerable: true); + } + + [Fact] + public void VerifyPropertyTypesFailsForDisallowedTypes() + { + // Arrange. + var reader = new VectorStoreRecordPropertyReader(typeof(SinglePropsModel), null, null); + + // Act. + var ex1 = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyPropertyTypes(reader.DataProperties, [typeof(int), typeof(float)], "Data")); + var ex2 = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyPropertyTypes(this._singlePropsDefinition.Properties.OfType(), [typeof(int), typeof(float)], "Data")); + + // Assert. + Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. Type of the property 'Data' is System.String.", ex1.Message); + Assert.Equal("Data properties must be one of the supported types: System.Int32, System.Single. 
Type of the property 'Data' is System.String.", ex2.Message); + } + + [Theory] + [InlineData(typeof(SinglePropsModel), false, new Type[] { typeof(string) }, false)] + [InlineData(typeof(VectorStoreGenericDataModel), false, new Type[] { typeof(string), typeof(ulong) }, false)] + [InlineData(typeof(VectorStoreGenericDataModel), true, new Type[] { typeof(string), typeof(ulong) }, false)] + [InlineData(typeof(VectorStoreGenericDataModel), false, new Type[] { typeof(string), typeof(ulong) }, true)] + public void VerifyGenericDataModelKeyTypeThrowsOnlyForUnsupportedKeyTypeWithoutCustomMapper(Type recordType, bool customMapperSupplied, IEnumerable allowedKeyTypes, bool shouldThrow) + { + if (shouldThrow) + { + var ex = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(recordType, customMapperSupplied, allowedKeyTypes)); + Assert.Equal("The key type 'System.Int32' of data model 'VectorStoreGenericDataModel' is not supported by the default mappers. Only the following key types are supported: System.String, System.UInt64. 
Please provide your own mapper to map to your chosen key type.", ex.Message); + } + else + { + VectorStoreRecordPropertyVerification.VerifyGenericDataModelKeyType(recordType, customMapperSupplied, allowedKeyTypes); + } + } + + [Theory] + [InlineData(typeof(SinglePropsModel), false, false)] + [InlineData(typeof(VectorStoreGenericDataModel), true, false)] + [InlineData(typeof(VectorStoreGenericDataModel), false, true)] + public void VerifyGenericDataModelDefinitionSuppliedThrowsOnlyForMissingDefinition(Type recordType, bool definitionSupplied, bool shouldThrow) + { + if (shouldThrow) + { + var ex = Assert.Throws(() => VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(recordType, definitionSupplied)); + Assert.Equal("A VectorStoreRecordDefinition must be provided when using 'VectorStoreGenericDataModel'.", ex.Message); + } + else + { + VectorStoreRecordPropertyVerification.VerifyGenericDataModelDefinitionSupplied(recordType, definitionSupplied); + } + } + + [Theory] + [InlineData(typeof(List), true)] + [InlineData(typeof(ICollection), true)] + [InlineData(typeof(IEnumerable), true)] + [InlineData(typeof(IList), true)] + [InlineData(typeof(IReadOnlyCollection), true)] + [InlineData(typeof(IReadOnlyList), true)] + [InlineData(typeof(string[]), true)] + [InlineData(typeof(IEnumerable), true)] + [InlineData(typeof(ArrayList), true)] + [InlineData(typeof(string), false)] + [InlineData(typeof(HashSet), false)] + [InlineData(typeof(ISet), false)] + [InlineData(typeof(Dictionary), false)] + [InlineData(typeof(Stack), false)] + [InlineData(typeof(Queue), false)] + public void IsSupportedEnumerableTypeReturnsCorrectAnswerForEachType(Type type, bool expected) + { + // Act. + var actual = VectorStoreRecordPropertyVerification.IsSupportedEnumerableType(type); + + // Assert. + Assert.Equal(expected, actual); + } + +#pragma warning disable CA1812 // Invalid unused classes error, since I am using these for testing purposes above. 
+ + private sealed class SinglePropsModel + { + [VectorStoreRecordKey] + public string Key { get; set; } = string.Empty; + + [VectorStoreRecordData] + public string Data { get; set; } = string.Empty; + + [VectorStoreRecordVector] + public ReadOnlyMemory Vector { get; set; } + + public string NotAnnotated { get; set; } = string.Empty; + } + + private readonly VectorStoreRecordDefinition _singlePropsDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("Data", typeof(string)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + ] + }; + + private sealed class EnumerablePropsModel + { + [VectorStoreRecordKey] + public string Key { get; set; } = string.Empty; + + [VectorStoreRecordData] + public IEnumerable EnumerableData { get; set; } = new List(); + + [VectorStoreRecordData] + public string[] ArrayData { get; set; } = Array.Empty(); + + [VectorStoreRecordData] + public List ListData { get; set; } = new List(); + + [VectorStoreRecordVector] + public ReadOnlyMemory Vector { get; set; } + + public string NotAnnotated { get; set; } = string.Empty; + } + + private readonly VectorStoreRecordDefinition _enumerablePropsDefinition = new() + { + Properties = + [ + new VectorStoreRecordKeyProperty("Key", typeof(string)), + new VectorStoreRecordDataProperty("EnumerableData", typeof(IEnumerable)), + new VectorStoreRecordDataProperty("ArrayData", typeof(string[])), + new VectorStoreRecordDataProperty("ListData", typeof(List)), + new VectorStoreRecordVectorProperty("Vector", typeof(ReadOnlyMemory)) + ] + }; + +#pragma warning restore CA1812 +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs new file mode 100644 index 000000000000..f7a4e947ec38 --- /dev/null +++ 
b/dotnet/src/SemanticKernel.UnitTests/Extensions/ClientResultExceptionExtensionsTests.cs @@ -0,0 +1,72 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.ClientModel.Primitives; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class ClientResultExceptionExtensionsTests +{ + [Fact] + public void ItCanRecoverFromResponseErrorAndConvertsToHttpOperationExceptionWithDefaultData() + { + // Arrange + var exception = new ClientResultException("message", ClientPipeline.Create().CreateMessage().Response); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Null(httpOperationException.ResponseContent); + Assert.Null(httpOperationException.StatusCode); + } + + [Fact] + public void ItCanProvideResponseContentAndStatusCode() + { + // Arrange + using var pipelineResponse = new MockPipelineResponse(); + + pipelineResponse.SetContent("content"); + pipelineResponse.SetStatus(200); + + var exception = new ClientResultException("message", pipelineResponse); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + Assert.NotNull(httpOperationException.StatusCode); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Equal(pipelineResponse.Content.ToString(), httpOperationException.ResponseContent); + Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); + } + + [Fact] + public void ItProvideStatusForResponsesWithoutContent() + { + // Arrange + using var pipelineResponse = new MockPipelineResponse(); + + pipelineResponse.SetStatus(200); + + var exception = new ClientResultException("message", 
pipelineResponse); + + // Act + var httpOperationException = exception.ToHttpOperationException(); + + // Assert + Assert.NotNull(httpOperationException); + Assert.NotNull(httpOperationException.StatusCode); + Assert.Empty(httpOperationException.ResponseContent!); + Assert.Equal(exception, httpOperationException.InnerException); + Assert.Equal(exception.Message, httpOperationException.Message); + Assert.Equal(pipelineResponse.Status, (int)httpOperationException.StatusCode!); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs index dc9db68b5836..31ceeac6015a 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelBuilderTests.cs @@ -7,6 +7,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; +using Microsoft.SemanticKernel.Connectors.AzureOpenAI; using Microsoft.SemanticKernel.Connectors.OpenAI; using Microsoft.SemanticKernel.TextGeneration; using Xunit; @@ -109,7 +110,7 @@ public void ItBuildsServicesIntoKernel() { var builder = Kernel.CreateBuilder() .AddOpenAIChatCompletion(modelId: "abcd", apiKey: "efg", serviceId: "openai") - .AddAzureOpenAITextGeneration(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); + .AddAzureOpenAIChatCompletion(deploymentName: "hijk", modelId: "qrs", endpoint: "https://lmnop", apiKey: "tuv", serviceId: "azureopenai"); builder.Services.AddSingleton(CultureInfo.InvariantCulture); builder.Services.AddSingleton(CultureInfo.CurrentCulture); @@ -118,10 +119,10 @@ public void ItBuildsServicesIntoKernel() Kernel kernel = builder.Build(); Assert.IsType(kernel.GetRequiredService("openai")); - Assert.IsType(kernel.GetRequiredService("azureopenai")); + Assert.IsType(kernel.GetRequiredService("azureopenai")); Assert.Equal(2, 
kernel.GetAllServices().Count()); - Assert.Single(kernel.GetAllServices()); + Assert.Equal(2, kernel.GetAllServices().Count()); Assert.Equal(3, kernel.GetAllServices().Count()); } diff --git a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs index ecb051b7d7b1..e3ad0cd53a5c 100644 --- a/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/Prompt/ChatPromptParserTests.cs @@ -114,6 +114,40 @@ public void ItReturnsChatHistoryWithValidContentItemsIncludeCData() """, c.Content)); } + [Fact] + public void ItReturnsChatHistoryWithValidDataImageContent() + { + // Arrange + string prompt = GetValidPromptWithDataUriImageContent(); + + // Act + bool result = ChatPromptParser.TryParse(prompt, out var chatHistory); + + // Assert + Assert.True(result); + Assert.NotNull(chatHistory); + + Assert.Collection(chatHistory, + c => Assert.Equal("What can I help with?", c.Content), + c => + { + Assert.Equal("Explain this image", c.Content); + Assert.Collection(c.Items, + o => + { + Assert.IsType(o); + Assert.Equal("Explain this image", ((TextContent)o).Text); + }, + o => + { + Assert.IsType(o); + Assert.Equal("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAAXNSR0IArs4c6QAAACVJREFUKFNj/KTO/J+BCMA4iBUyQX1A0I10VAizCj1oMdyISyEAFoQbHwTcuS8AAAAASUVORK5CYII=", ((ImageContent)o).DataUri); + Assert.Equal("image/png", ((ImageContent)o).MimeType); + Assert.NotNull(((ImageContent)o).Data); + }); + }); + } + [Fact] public void ItReturnsChatHistoryWithValidContentItemsIncludeCode() { @@ -210,6 +244,21 @@ Second line. """; } + private static string GetValidPromptWithDataUriImageContent() + { + return + """ + + What can I help with? 
+ + + Explain this image + data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAAXNSR0IArs4c6QAAACVJREFUKFNj/KTO/J+BCMA4iBUyQX1A0I10VAizCj1oMdyISyEAFoQbHwTcuS8AAAAASUVORK5CYII= + + + """; + } + private static string GetValidPromptWithCDataSection() { return diff --git a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs index 5fecdf71b8c3..b3bb439b5ac6 100644 --- a/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs +++ b/dotnet/src/SemanticKernel.UnitTests/PromptTemplate/PromptTemplateConfigTests.cs @@ -1,6 +1,7 @@ ๏ปฟ// Copyright (c) Microsoft. All rights reserved. using System; +using System.Linq; using System.Text.Json; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; @@ -376,6 +377,108 @@ public void DeserializingExpectCompletion() Assert.Equal("gpt-4", promptTemplateConfig.DefaultExecutionSettings?.ModelId); } + [Fact] + public void DeserializingAutoFunctionCallingChoice() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "execution_settings": { + "default": { + "model_id": "gpt-4", + "function_choice_behavior": { + "type": "auto", + "functions":["p1.f1"] + } + } + } + } + """; + + // Act + var promptTemplateConfig = PromptTemplateConfig.FromJson(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.Single(promptTemplateConfig.ExecutionSettings); + + var executionSettings = promptTemplateConfig.ExecutionSettings.Single().Value; + + var autoFunctionCallChoice = executionSettings.FunctionChoiceBehavior as AutoFunctionChoiceBehavior; + Assert.NotNull(autoFunctionCallChoice); + + Assert.NotNull(autoFunctionCallChoice.Functions); + Assert.Equal("p1.f1", autoFunctionCallChoice.Functions.Single()); + } + + [Fact] + public void DeserializingRequiredFunctionCallingChoice() + { + // Arrange + string configPayload = """ + { + "schema": 1, + 
"execution_settings": { + "default": { + "model_id": "gpt-4", + "function_choice_behavior": { + "type": "required", + "functions":["p1.f1"] + } + } + } + } + """; + + // Act + var promptTemplateConfig = PromptTemplateConfig.FromJson(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.Single(promptTemplateConfig.ExecutionSettings); + + var executionSettings = promptTemplateConfig.ExecutionSettings.Single().Value; + Assert.NotNull(executionSettings); + + var requiredFunctionCallChoice = executionSettings.FunctionChoiceBehavior as RequiredFunctionChoiceBehavior; + Assert.NotNull(requiredFunctionCallChoice); + + Assert.NotNull(requiredFunctionCallChoice.Functions); + Assert.Equal("p1.f1", requiredFunctionCallChoice.Functions.Single()); + } + + [Fact] + public void DeserializingNoneFunctionCallingChoice() + { + // Arrange + string configPayload = """ + { + "schema": 1, + "execution_settings": { + "default": { + "model_id": "gpt-4", + "function_choice_behavior": { + "type": "none" + } + } + } + } + """; + + // Act + var promptTemplateConfig = PromptTemplateConfig.FromJson(configPayload); + + // Assert + Assert.NotNull(promptTemplateConfig); + Assert.Single(promptTemplateConfig.ExecutionSettings); + + var executionSettings = promptTemplateConfig.ExecutionSettings.Single().Value; + + var noneFunctionCallChoice = executionSettings.FunctionChoiceBehavior as NoneFunctionChoiceBehavior; + Assert.NotNull(noneFunctionCallChoice); + } + [Fact] public void DeserializingExpectInputVariables() { diff --git a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj index e929fe1ca82f..979a268c7ac8 100644 --- a/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj +++ b/dotnet/src/SemanticKernel.UnitTests/SemanticKernel.UnitTests.csproj @@ -24,20 +24,21 @@ - - - + + + + diff --git 
a/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs new file mode 100644 index 000000000000..b069902be604 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/AIConnectors/FunctionCallsProcessorTests.cs @@ -0,0 +1,762 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.ChatCompletion; +#pragma warning disable IDE0005 // Using directive is unnecessary +using Microsoft.SemanticKernel.Connectors.FunctionCalling; +#pragma warning restore IDE0005 // Using directive is unnecessary +using Moq; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities.AIConnectors; +public class FunctionCallsProcessorTests +{ + private readonly FunctionCallsProcessor _sut; + + public FunctionCallsProcessorTests() + { + this._sut = new FunctionCallsProcessor(); + } + + [Fact] + public void ItShouldReturnNoConfigurationIfNoBehaviorProvided() + { + // Act + var config = this._sut.GetConfiguration(behavior: null, chatHistory: [], requestIndex: 0, kernel: null); + + // Assert + Assert.Null(config); + } + + [Fact] + public void ItShouldNotDisableAutoInvocationIfMaximumAutoInvocationLimitNotReached() + { + // Act + var config = this._sut.GetConfiguration(behavior: FunctionChoiceBehavior.Auto(), chatHistory: [], requestIndex: 127, kernel: CreateKernel()); + + // Assert + Assert.True(config!.AutoInvoke); + } + + [Fact] + public void ItShouldDisableAutoInvocationIfNoKernelIsProvided() + { + // Arrange + var behaviorMock = new Mock(); + behaviorMock + .Setup(b => b.GetConfiguration(It.IsAny())) + .Returns(new FunctionChoiceBehaviorConfiguration(options: new 
FunctionChoiceBehaviorOptions())); + + // Act + var config = this._sut.GetConfiguration(behavior: behaviorMock.Object, chatHistory: [], requestIndex: 128, kernel: null); // No kernel provided + + // Assert + Assert.False(config!.AutoInvoke); + } + + [Fact] + public void ItShouldDisableAutoInvocationIfMaximumAutoInvocationLimitReached() + { + // Act + var config = this._sut.GetConfiguration(behavior: FunctionChoiceBehavior.Auto(), chatHistory: [], requestIndex: 128, kernel: CreateKernel()); + + // Assert + Assert.False(config!.AutoInvoke); + } + + [Fact] + public async Task ItShouldDisableAutoInvocationIfMaximumInflightAutoInvocationLimitReachedAsync() + { + // Arrange + var kernel = CreateKernel(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("ProcessFunctionCallsRecursivelyToReachInflightLimitAsync", "test")); + + int invocationNumber = 0; + FunctionChoiceBehaviorConfiguration? expectedConfiguration = null; + + async Task ProcessFunctionCallsRecursivelyToReachInflightLimitAsync() + { + if (invocationNumber++ == 128) // 128 is the current Inflight limit + { + expectedConfiguration = this._sut.GetConfiguration(behavior: FunctionChoiceBehavior.Auto(), chatHistory: [], requestIndex: 0, kernel: kernel); + return; + } + + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: [], + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel, + cancellationToken: CancellationToken.None); + } + + kernel.Plugins.AddFromFunctions("test", [KernelFunctionFactory.CreateFromMethod(ProcessFunctionCallsRecursivelyToReachInflightLimitAsync, "ProcessFunctionCallsRecursivelyToReachInflightLimitAsync")]); + + // Act + var res = await kernel.InvokeAsync("test", "ProcessFunctionCallsRecursivelyToReachInflightLimitAsync"); + + // Assert + Assert.NotNull(expectedConfiguration); + Assert.False(expectedConfiguration!.AutoInvoke); + } + + [Fact] + public async Task 
ItShouldAddFunctionCallAssistantMessageToChatHistoryAsync() + { + // Arrange + var chatHistory = new ChatHistory(); + var chatMessageContent = new ChatMessageContent(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: CreateKernel(), + cancellationToken: CancellationToken.None); + + // Assert + Assert.Single(chatHistory); + Assert.Contains(chatMessageContent, chatHistory); + } + + [Fact] + public async Task ItShouldAddFunctionCallExceptionToChatHistoryAsync() + { + // Arrange + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin") + { + Exception = new JsonException("Deserialization failed.") // Simulate an exception + }); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: CreateKernel(), + cancellationToken: CancellationToken.None); + + // Assert + var functionResult = chatHistory[1].Items.OfType().Single(); + + Assert.Equal("MyPlugin", functionResult.PluginName); + Assert.Equal("Function1", functionResult.FunctionName); + Assert.Equal("Error: Function call processing failed. 
Deserialization failed.", functionResult.Result); + } + + [Fact] + public async Task ItShouldAddFunctionInvocationExceptionToChatHistoryAsync() + { + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod(() => { throw new InvalidOperationException("This is test exception."); }, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = CreateKernel(plugin); + + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin")); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel, + cancellationToken: CancellationToken.None); + + // Assert + var functionResult = chatHistory[1].Items.OfType().Single(); + + Assert.Equal("MyPlugin", functionResult.PluginName); + Assert.Equal("Function1", functionResult.FunctionName); + Assert.Equal("Error: Exception while invoking function. 
This is test exception.", functionResult.Result); + } + + [Fact] + public async Task ItShouldAddErrorToChatHistoryIfFunctionCallNotAdvertisedAsync() + { + // Arrange + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin")); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => false, // Return false to simulate that the function is not advertised + kernel: CreateKernel(), + cancellationToken: CancellationToken.None); + + // Assert + var functionResult = chatHistory[1].Items.OfType().Single(); + + Assert.Equal("MyPlugin", functionResult.PluginName); + Assert.Equal("Function1", functionResult.FunctionName); + Assert.Equal("Error: Function call request for a function that wasn't defined.", functionResult.Result); + } + + [Fact] + public async Task ItShouldAddErrorToChatHistoryIfFunctionIsNotRegisteredOnKernelAsync() + { + // Arrange + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin")); // The call for function that is not registered on the kernel + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: CreateKernel(), + cancellationToken: CancellationToken.None); + + // Assert + var functionResult = chatHistory[1].Items.OfType().Single(); + + Assert.Equal("MyPlugin", functionResult.PluginName); + Assert.Equal("Function1", functionResult.FunctionName); + Assert.Equal("Error: Requested function could not be found.", functionResult.Result); + } + + [Fact] + public async Task ItShouldInvokeFunctionsAsync() + { + // Arrange + int functionInvocations = 0; + + var function1 = 
KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin); + + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(2, functionInvocations); + + Assert.Equal(3, chatHistory.Count); + + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("MyPlugin", function1Result.PluginName); + Assert.Equal("Function1", function1Result.FunctionName); + Assert.Equal("function1-result", function1Result.Result); + + var function2Result = chatHistory[2].Items.OfType().Single(); + Assert.Equal("MyPlugin", function2Result.PluginName); + Assert.Equal("Function2", function2Result.FunctionName); + Assert.Equal("function2-result", function2Result.Result); + } + + [Fact] + public async Task ItShouldInvokeFiltersAsync() + { + // Arrange + int filterInvocations = 0; + int functionInvocations = 0; + int[] expectedRequestSequenceNumbers = [0, 0]; + int[] expectedFunctionSequenceNumbers = [0, 1]; + List requestSequenceNumbers = []; + List functionSequenceNumbers = []; + + var function1 = 
KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + Kernel? kernel = null; + kernel = CreateKernel(plugin, async (context, next) => + { + Assert.Equal(kernel, context.Kernel); + + requestSequenceNumbers.Add(context.RequestSequenceIndex); + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + filterInvocations++; + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(2, filterInvocations); + Assert.Equal(2, functionInvocations); + Assert.Equal(expectedRequestSequenceNumbers, requestSequenceNumbers); + Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers); + + Assert.Equal(3, chatHistory.Count); + + Assert.Same(chatMessageContent, chatHistory[0]); + + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("function1-result", function1Result.Result); + + var function2Result = chatHistory[2].Items.OfType().Single(); + Assert.Equal("function2-result", function2Result.Result); + } + + [Fact] + public async Task ItShouldInvokeMultipleFiltersInOrderAsync() + { + // Arrange + 
var function = KernelFunctionFactory.CreateFromMethod(() => "Result"); + var executionOrder = new List(); + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var filter1 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter1-Invoking"); + await next(context); + executionOrder.Add("Filter1-Invoked"); + }); + + var filter2 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter2-Invoking"); + await next(context); + executionOrder.Add("Filter2-Invoked"); + }); + + var filter3 = new AutoFunctionInvocationFilter(async (context, next) => + { + executionOrder.Add("Filter3-Invoking"); + await next(context); + executionOrder.Add("Filter3-Invoked"); + }); + + var builder = Kernel.CreateBuilder(); + + builder.Plugins.Add(plugin); + + builder.Services.AddSingleton(filter1); + builder.Services.AddSingleton(filter2); + builder.Services.AddSingleton(filter3); + + var kernel = builder.Build(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal("Filter1-Invoking", executionOrder[0]); + Assert.Equal("Filter2-Invoking", 
executionOrder[1]); + Assert.Equal("Filter3-Invoking", executionOrder[2]); + Assert.Equal("Filter3-Invoked", executionOrder[3]); + Assert.Equal("Filter2-Invoked", executionOrder[4]); + Assert.Equal("Filter1-Invoked", executionOrder[5]); + Assert.Equal(3, chatHistory.Count); + } + + [Fact] + public async Task FilterCanOverrideArgumentsAsync() + { + // Arrange + const string NewValue = "NewValue"; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin, async (context, next) => + { + context.Arguments!["parameter"] = NewValue; + await next(context); + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(3, chatHistory.Count); + Assert.Same(chatMessageContent, chatHistory[0]); + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("NewValue", function1Result.Result); + var function2Result = chatHistory[2].Items.OfType().Single(); + Assert.Equal("NewValue", function2Result.Result); + } + + [Fact] + public async Task FilterCanHandleExceptionAsync() + { + // Arrange + var function1 = 
KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin, async (context, next) => + { + try + { + await next(context); + } + catch (KernelException exception) + { + Assert.Equal("Exception from Function1", exception.Message); + context.Result = new FunctionResult(context.Result, "Result from filter"); + } + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + chatHistory.AddSystemMessage("System message"); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + var firstFunctionResult = chatHistory[^2].Content; + var secondFunctionResult = chatHistory[^1].Content; + + // Assert + Assert.Equal("Result from filter", firstFunctionResult); + Assert.Equal("Result from Function2", secondFunctionResult); + } + + [Fact] + public async Task FiltersCanSkipFunctionExecutionAsync() + { + // Arrange + int filterInvocations = 0; + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string 
parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin, async (context, next) => + { + // Filter delegate is invoked only for second function, the first one should be skipped. + if (context.Function.Name == "Function2") + { + await next(context); + } + + filterInvocations++; + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(2, filterInvocations); + Assert.Equal(0, firstFunctionInvocations); + Assert.Equal(1, secondFunctionInvocations); + } + + [Fact] + public async Task PreFilterCanTerminateOperationAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin, async (context, next) => + { + // Terminating before first function, so all functions won't be invoked. 
+ context.Terminate = true; + + await next(context); + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(0, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + } + + [Fact] + public async Task PostFilterCanTerminateOperationAsync() + { + // Arrange + int firstFunctionInvocations = 0; + int secondFunctionInvocations = 0; + List functionSequenceNumbers = []; + + var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1"); + var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]); + + var kernel = CreateKernel(plugin, async (context, next) => + { + functionSequenceNumbers.Add(context.FunctionSequenceIndex); + + await next(context); + + // Terminating after first function, so second function won't be invoked. 
+ context.Terminate = true; + }); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function1-result" })); + chatMessageContent.Items.Add(new FunctionCallContent("Function2", "MyPlugin", arguments: new KernelArguments() { ["parameter"] = "function2-result" })); + + var chatHistory = new ChatHistory(); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel!, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(1, firstFunctionInvocations); + Assert.Equal(0, secondFunctionInvocations); + Assert.Equal([0], functionSequenceNumbers); + + Assert.Equal(2, chatHistory.Count); + + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("function1-result", function1Result.Result); + } + + private static Kernel CreateKernel(KernelPlugin? plugin = null, Func, Task>? 
onAutoFunctionInvocation = null) + { + var builder = Kernel.CreateBuilder(); + + if (plugin is not null) + { + builder.Plugins.Add(plugin); + } + + if (onAutoFunctionInvocation is not null) + { + builder.Services.AddSingleton(new AutoFunctionInvocationFilter(onAutoFunctionInvocation)); + } + + return builder.Build(); + } + + [Fact] + public async Task ItShouldHandleChatMessageContentAsFunctionResultAsync() + { + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod(() => { return new ChatMessageContent(AuthorRole.User, "function1-result"); }, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = CreateKernel(plugin); + + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin")); + + // Act + await this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(2, chatHistory.Count); + + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("MyPlugin", function1Result.PluginName); + Assert.Equal("Function1", function1Result.FunctionName); + Assert.IsType(function1Result.Result); + Assert.Equal("function1-result", function1Result.Result); + } + + [Fact] + public async Task ItShouldSerializeFunctionResultOfUnknowTypeAsync() + { + // Arrange + var function1 = KernelFunctionFactory.CreateFromMethod(() => { return new { a = 2, b = "test" }; }, "Function1"); + var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1]); + + var kernel = CreateKernel(plugin); + + var chatHistory = new ChatHistory(); + + var chatMessageContent = new ChatMessageContent(); + chatMessageContent.Items.Add(new FunctionCallContent("Function1", "MyPlugin")); + + // Act + await 
this._sut.ProcessFunctionCallsAsync( + chatMessageContent: chatMessageContent, + chatHistory: chatHistory, + requestIndex: 0, + checkIfFunctionAdvertised: (_) => true, + kernel: kernel, + cancellationToken: CancellationToken.None); + + // Assert + Assert.Equal(2, chatHistory.Count); + + var function1Result = chatHistory[1].Items.OfType().Single(); + Assert.Equal("MyPlugin", function1Result.PluginName); + Assert.Equal("Function1", function1Result.FunctionName); + Assert.IsType(function1Result.Result); + Assert.Equal("{\"a\":2,\"b\":\"test\"}", function1Result.Result); + } + + [Fact] + public void ItShouldHandleFunctionResultsOfStringType() + { + // Arrange + string functionResult = "Test result"; + + // Act + var result = FunctionCallsProcessor.ProcessFunctionResult(functionResult); + + // Assert + Assert.Equal(functionResult, result); + } + + [Fact] + public void ItShouldHandleFunctionResultsOfChatMessageContentType() + { + // Arrange + var functionResult = new ChatMessageContent(AuthorRole.User, "Test result"); + + // Act + var result = FunctionCallsProcessor.ProcessFunctionResult(functionResult); + + // Assert + Assert.Equal("Test result", result); + } + + [Fact] + public void ItShouldSerializeFunctionResultsOfComplexType() + { + // Arrange + var functionResult = new { a = 2, b = "test" }; + + // Act + var result = FunctionCallsProcessor.ProcessFunctionResult(functionResult); + + // Assert + Assert.Equal("{\"a\":2,\"b\":\"test\"}", result); + } + + private sealed class AutoFunctionInvocationFilter( + Func, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter + { + private readonly Func, Task>? _onAutoFunctionInvocation = onAutoFunctionInvocation; + + public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next) => + this._onAutoFunctionInvocation?.Invoke(context, next) ?? 
Task.CompletedTask; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/KernelJsonSchemaBuilderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/KernelJsonSchemaBuilderTests.cs new file mode 100644 index 000000000000..6079979db21a --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/KernelJsonSchemaBuilderTests.cs @@ -0,0 +1,34 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities; + +public class KernelJsonSchemaBuilderTests +{ + [Fact] + public void ItShouldBuildJsonSchemaForTypesWithPublicMembersHavingTypesThatCanRepresentOtherTypesWithDefaultValuesInTheConstructor() + { + // Arrange & Act + var schema = KernelJsonSchemaBuilder.Build(null, typeof(ClassWithDefaultValuesInConstructorForTypesThatCanRepresentOtherTypes)); + + // Assert + Assert.NotNull(schema?.RootElement); + } + +#pragma warning disable CA1812 // Instantiated by reflection + private sealed class ClassWithDefaultValuesInConstructorForTypesThatCanRepresentOtherTypes + { + public ClassWithDefaultValuesInConstructorForTypesThatCanRepresentOtherTypes(object? content = null, KernelJsonSchema? schema = null) + { + this.Content = content; + this.Schema = schema; + } + + public object? Content { get; set; } + + public KernelJsonSchema? Schema { get; set; } + } +#pragma warning restore CA1812 // Instantiated by reflection +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs new file mode 100644 index 000000000000..ca36f300b1c2 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/GenericActionPipelinePolicyTests.cs @@ -0,0 +1,39 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. 
+ +using System.ClientModel.Primitives; +using Xunit; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class GenericActionPipelinePolicyTests +{ + [Fact] + public void ItCanBeInstantiated() + { + // Act + var addHeaderRequestPolicy = new GenericActionPipelinePolicy((message) => { }); + + // Assert + Assert.NotNull(addHeaderRequestPolicy); + } + + [Fact] + public void ItProcessAddsHeaderToRequest() + { + // Arrange + var headerName = "headerName"; + var headerValue = "headerValue"; + var sut = new GenericActionPipelinePolicy((message) => { message.Request.Headers.Add(headerName, headerValue); }); + + var pipeline = ClientPipeline.Create(); + var message = pipeline.CreateMessage(); + + // Act + sut.Process(message, [sut], 0); + + // Assert + message.Request.Headers.TryGetValue(headerName, out var value); + Assert.NotNull(value); + Assert.Equal(headerValue, value); + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs new file mode 100644 index 000000000000..d147f1c98df1 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockPipelineResponse.cs @@ -0,0 +1,151 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class MockPipelineResponse : PipelineResponse +{ + private int _status; + private string _reasonPhrase; + private Stream? _contentStream; + private BinaryData? 
_bufferedContent; + + private readonly PipelineResponseHeaders _headers; + + private bool _disposed; + + public MockPipelineResponse(int status = 0, string reasonPhrase = "") + { + this._status = status; + this._reasonPhrase = reasonPhrase; + this._headers = new MockResponseHeaders(); + } + + public override int Status => this._status; + + public void SetStatus(int value) => this._status = value; + + public override string ReasonPhrase => this._reasonPhrase; + + public void SetReasonPhrase(string value) => this._reasonPhrase = value; + + public void SetContent(byte[] content) + { + this.ContentStream = new MemoryStream(content, 0, content.Length, false, true); + } + + public MockPipelineResponse SetContent(string content) + { + this.SetContent(Encoding.UTF8.GetBytes(content)); + return this; + } + + public override Stream? ContentStream + { + get => this._contentStream; + set => this._contentStream = value; + } + + public override BinaryData Content + { + get + { + if (this._contentStream is null) + { + return new BinaryData(Array.Empty()); + } + + if (this.ContentStream is not MemoryStream memoryContent) + { + throw new InvalidOperationException("The response is not buffered."); + } + + if (memoryContent.TryGetBuffer(out ArraySegment segment)) + { + return new BinaryData(segment.AsMemory()); + } + return new BinaryData(memoryContent.ToArray()); + } + } + + protected override PipelineResponseHeaders HeadersCore + => this._headers; + + public sealed override void Dispose() + { + this.Dispose(true); + + GC.SuppressFinalize(this); + } + + protected void Dispose(bool disposing) + { + if (disposing && !this._disposed) + { + Stream? 
content = this._contentStream; + if (content != null) + { + this._contentStream = null; + content.Dispose(); + } + + this._disposed = true; + } + } + + public override BinaryData BufferContent(CancellationToken cancellationToken = default) + { + if (this._bufferedContent is not null) + { + return this._bufferedContent; + } + + if (this._contentStream is null) + { + this._bufferedContent = new BinaryData(Array.Empty()); + return this._bufferedContent; + } + + MemoryStream bufferStream = new(); + this._contentStream.CopyTo(bufferStream); + this._contentStream.Dispose(); + this._contentStream = bufferStream; + + // Less efficient FromStream method called here because it is a mock. + // For intended production implementation, see HttpClientTransportResponse. + this._bufferedContent = BinaryData.FromStream(bufferStream); + return this._bufferedContent; + } + + public override async ValueTask BufferContentAsync(CancellationToken cancellationToken = default) + { + if (this._bufferedContent is not null) + { + return this._bufferedContent; + } + + if (this._contentStream is null) + { + this._bufferedContent = new BinaryData(Array.Empty()); + return this._bufferedContent; + } + + MemoryStream bufferStream = new(); + + await this._contentStream.CopyToAsync(bufferStream, cancellationToken).ConfigureAwait(false); + await this._contentStream.DisposeAsync().ConfigureAwait(false); + + this._contentStream = bufferStream; + + // Less efficient FromStream method called here because it is a mock. + // For intended production implementation, see HttpClientTransportResponse. 
+ this._bufferedContent = BinaryData.FromStream(bufferStream); + return this._bufferedContent; + } +} diff --git a/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs new file mode 100644 index 000000000000..01d698512be5 --- /dev/null +++ b/dotnet/src/SemanticKernel.UnitTests/Utilities/OpenAI/MockResponseHeaders.cs @@ -0,0 +1,32 @@ +๏ปฟ// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; + +namespace SemanticKernel.UnitTests.Utilities.OpenAI; + +public class MockResponseHeaders : PipelineResponseHeaders +{ + private readonly Dictionary _headers; + + public MockResponseHeaders() + { + this._headers = new Dictionary(); + } + + public override IEnumerator> GetEnumerator() + { + throw new NotImplementedException(); + } + + public override bool TryGetValue(string name, out string? value) + { + return this._headers.TryGetValue(name, out value); + } + + public override bool TryGetValues(string name, out IEnumerable? 
values) + { + throw new NotImplementedException(); + } +} diff --git a/prompt_template_samples/ChatPlugin/Chat/config.json b/prompt_template_samples/ChatPlugin/Chat/config.json index fa98c67602e8..ae1dba827434 100644 --- a/prompt_template_samples/ChatPlugin/Chat/config.json +++ b/prompt_template_samples/ChatPlugin/Chat/config.json @@ -5,9 +5,9 @@ "default": { "max_tokens": 150, "temperature": 0.9, - "top_p": 0.0, + "top_p": 0.1, "presence_penalty": 0.6, - "frequency_penalty": 0.0, + "frequency_penalty": 0.1, "stop_sequences": [ "Human:", "AI:" diff --git a/prompt_template_samples/FunPlugin/Limerick/config.json b/prompt_template_samples/FunPlugin/Limerick/config.json index f929ede1e31a..659b1f4f897f 100644 --- a/prompt_template_samples/FunPlugin/Limerick/config.json +++ b/prompt_template_samples/FunPlugin/Limerick/config.json @@ -5,9 +5,9 @@ "default": { "max_tokens": 100, "temperature": 0.7, - "top_p": 0, - "presence_penalty": 0, - "frequency_penalty": 0 + "top_p": 0.1, + "presence_penalty": 0.1, + "frequency_penalty": 0.1 } }, "input_variables": [ diff --git a/python/.conf/packages_list.json b/python/.conf/packages_list.json deleted file mode 100644 index d4011687b48c..000000000000 --- a/python/.conf/packages_list.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "black": { - "repo": "https://github.com/psf/black", - "rev": "${rev}" - }, - "mypy": { - "repo": "https://github.com/pre-commit/mirrors-mypy", - "rev": "v${rev}" - }, - "ruff": { - "repo": "https://github.com/astral-sh/ruff-pre-commit", - "rev": "v${rev}" - } -} \ No newline at end of file diff --git a/python/.cspell.json b/python/.cspell.json index 79602e7ac6ac..59047cdb4e05 100644 --- a/python/.cspell.json +++ b/python/.cspell.json @@ -24,36 +24,41 @@ ], "words": [ "aeiou", + "aiplatform", "azuredocindex", "azuredocs", "contentvector", + "contoso", + "datamodel", "dotenv", + "endregion", + "genai", + "generativeai", + "httpx", + "huggingface", + "kernelfunction", "logit", "logprobs", + "mistralai", 
"mongocluster", "ndarray", "nopep", "ollama", "onyourdatatest", "OPENAI", + "opentelemetry", + "OTEL", + "protos", "pydantic", + "pytestmark", + "qdrant", "retrywrites", - "kernelfunction", + "SEMANTICKERNEL", + "serde", "skprompt", "templating", "vectordb", - "generativeai", - "genai", - "protos", - "endregion", - "vertexai", - "aiplatform", - "serde", - "datamodel", "vectorstoremodel", - "qdrant", - "huggingface", - "pytestmark", - "contoso" + "vertexai" ] -} \ No newline at end of file +} diff --git a/python/.env.example b/python/.env.example index d63e29eb17c3..4d5ac35c51f9 100644 --- a/python/.env.example +++ b/python/.env.example @@ -8,30 +8,11 @@ AZURE_OPENAI_TEXT_DEPLOYMENT_NAME="" AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME="" AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" -AZURE_OPENAI_API_VERSION="2024-02-15-preview" -AZURE_OPENAI_TEMPERATURE=0 -AZURE_OPENAI_MAX_TOKENS=1000 -AZURE_OPENAI_TOP_P=1.0 -AZURE_OPENAI_STREAM=true +AZURE_OPENAI_API_VERSION="" AZURE_AISEARCH_URL="" AZURE_AISEARCH_SERVICE="" AZURE_AISEARCH_API_KEY="" AZURE_AISEARCH_INDEX_NAME="" -AZURE_AISEARCH_EMBEDDING_DEPLOYMENT_NAME="" -AZURE_AISEARCH_USE_SEMANTIC_SEARCH=false -AZURE_AISEARCH_SEMANTIC_SEARCH_CONFIG=default -AZURE_AISEARCH_INDEX_IS_PRECHUNKED=false -AZURE_AISEARCH_TOP_K=5 -AZURE_AISEARCH_ENABLE_IN_DOMAIN=true -AZURE_AISEARCH_CONTENT_COLUMNS=content -AZURE_AISEARCH_FILEPATH_COLUMN=filepath -AZURE_AISEARCH_TITLE_COLUMN=title -AZURE_AISEARCH_URL_COLUMN=url -AZURE_AISEARCH_VECTOR_COLUMNS=contentVector -AZURE_AISEARCH_QUERY_TYPE=simple -AZURE_AISEARCH_PERMITTED_GROUPS_COLUMN= -AZURE_AISEARCH_STRICTNESS=3 -AZURE_AISEARCH_FILTER="" MONGODB_ATLAS_CONNECTION_STRING="" PINECONE_API_KEY="" PINECONE_ENVIRONMENT="" @@ -40,11 +21,10 @@ WEAVIATE_URL="" WEAVIATE_API_KEY="" GOOGLE_SEARCH_ENGINE_ID="" REDIS_CONNECTION_STRING="" -AZCOSMOS_API = "" // should be mongo-vcore for now, as CosmosDB only supports vector search in mongo-vcore for now. 
-AZCOSMOS_CONNSTR = "" -AZCOSMOS_DATABASE_NAME = "" -AZCOSMOS_CONTAINER_NAME = "" -# Starts with AstraCS: +AZCOSMOS_API="" +AZCOSMOS_CONNSTR="" +AZCOSMOS_DATABASE_NAME="" +AZCOSMOS_CONTAINER_NAME="" ASTRADB_APP_TOKEN="" ASTRADB_ID="" ASTRADB_REGION="" diff --git a/.pre-commit-config.yaml b/python/.pre-commit-config.yaml similarity index 79% rename from .pre-commit-config.yaml rename to python/.pre-commit-config.yaml index 5fd6aa7d9377..5c3521dd142d 100644 --- a/.pre-commit-config.yaml +++ b/python/.pre-commit-config.yaml @@ -1,11 +1,6 @@ files: ^python/ fail_fast: true repos: - - repo: https://github.com/floatingpurr/sync_with_poetry - rev: 1.1.0 - hooks: - - id: sync_with_poetry - args: [--config=.pre-commit-config.yaml, --db=python/.conf/packages_list.json, python/poetry.lock] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: @@ -32,21 +27,29 @@ repos: name: Check Valid Python Notebooks types: ["jupyter"] - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.17.0 hooks: - id: pyupgrade args: [--py310-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.7 + rev: v0.6.1 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix ] + - id: ruff-format + - repo: https://github.com/astral-sh/uv-pre-commit + # uv version. 
+ rev: 0.3.3 + hooks: + # Update the uv lockfile + - id: uv-lock + files: ^\./python/(uv\.lock|pyproject\.toml)$ - repo: local hooks: - id: mypy files: ^python/semantic_kernel/ name: mypy - entry: poetry -C python/ run python -m mypy -p semantic_kernel --config-file=python/mypy.ini + entry: uv run mypy -p semantic_kernel --config-file python/mypy.ini language: system types: [python] pass_filenames: false diff --git a/python/.vscode/settings.json b/python/.vscode/settings.json index dca92354cf5e..93b973d6fc73 100644 --- a/python/.vscode/settings.json +++ b/python/.vscode/settings.json @@ -30,5 +30,8 @@ "notebook.codeActionsOnSave": { "source.fixAll": true, "source.organizeImports": true - } + }, + "python.analysis.extraPaths": [ + "${workspaceFolder}/samples/learn_resources" + ], } \ No newline at end of file diff --git a/python/.vscode/tasks.json b/python/.vscode/tasks.json index fa9736a6fd5e..dbd972976939 100644 --- a/python/.vscode/tasks.json +++ b/python/.vscode/tasks.json @@ -6,7 +6,7 @@ { "label": "Python: Run Checks", "type": "shell", - "command": "poetry", + "command": "uv", "args": [ "run", "pre-commit", @@ -34,7 +34,7 @@ { "label": "Python: Run Checks - Staged", "type": "shell", - "command": "poetry", + "command": "uv", "args": [ "run", "pre-commit", @@ -61,7 +61,7 @@ { "label": "Python: Run Mypy", "type": "shell", - "command": "poetry", + "command": "uv", "args": [ "run", "pre-commit", @@ -90,21 +90,17 @@ { "label": "Python: Install", "type": "shell", - "command": "poetry", - "args": [ - "install", - "--all-extras" - ], + "command": "make install PYTHON_VERSION=${input:py_version}", "presentation": { - "reveal": "silent", - "panel": "shared" + "reveal": "always", + "panel": "new" }, "problemMatcher": [] }, { "label": "Python: Tests - Unit", "type": "shell", - "command": "poetry", + "command": "uv", "args": [ "run", "pytest", @@ -120,13 +116,13 @@ { "label": "Python: Tests - Unit - Failed Only", "type": "shell", - "command": "poetry", + "command": "uv", 
"args": [ "run", "pytest", "tests/unit/", "--last-failed", - "-v" + "-vv" ], "group": "test", "presentation": { @@ -138,7 +134,7 @@ { "label": "Python: Tests - Code Coverage", "type": "shell", - "command": "poetry run pytest --cov=semantic_kernel --cov-report=term-missing:skip-covered tests/unit/", + "command": "uv run pytest --cov=semantic_kernel --cov-report=term-missing:skip-covered tests/unit/", "group": "test", "presentation": { "reveal": "always", @@ -149,7 +145,7 @@ { "label": "Python: Tests - All", "type": "shell", - "command": "poetry", + "command": "uv", "args": [ "run", "pytest", @@ -168,5 +164,18 @@ }, "problemMatcher": [] } + ], + "inputs": [ + { + "type": "pickString", + "options": [ + "3.10", + "3.11", + "3.12" + ], + "id": "py_version", + "description": "Python version", + "default": "3.10" + } ] } \ No newline at end of file diff --git a/python/DEV_SETUP.md b/python/DEV_SETUP.md index e9a938259cae..d81c64be6bc1 100644 --- a/python/DEV_SETUP.md +++ b/python/DEV_SETUP.md @@ -1,164 +1,126 @@ # Dev Setup -This document describes how to setup your environment with Python and Poetry, +This document describes how to setup your environment with Python and uv, if you're working on new features or a bug fix for Semantic Kernel, or simply want to run the tests included. -## LLM setup - -Make sure you have an -[OpenAI API Key](https://platform.openai.com) or -[Azure OpenAI service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart?pivots=rest-api) - -There are two methods to manage keys, secrets, and endpoints: - -1. Store them in environment variables. SK Python leverages pydantic settings to load keys, secrets, and endpoints. This means that there is a first attempt to load them from environment variables. The `.env` file naming applies to how the names should be stored as environment variables. - -2. 
If you'd like to use the `.env` file, you will need to configure the `.env` file with the following keys into a `.env` file (see the `.env.example` file): - -``` -OPENAI_API_KEY="" -OPENAI_ORG_ID="" -AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="" -AZURE_OPENAI_TEXT_DEPLOYMENT_NAME="" -AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME="" -AZURE_OPENAI_ENDPOINT="" -AZURE_OPENAI_API_KEY="" -``` +## System setup -You will then configure the Text/ChatCompletion class with the keyword argument `env_file_path`: +## If you're on WSL -```python -chat_completion = OpenAIChatCompletion(service_id="test", env_file_path=) -``` +Check that you've cloned the repository to `~/workspace` or a similar folder. +Avoid `/mnt/c/` and prefer using your WSL user's home directory. -This optional `env_file_path` parameter will allow pydantic settings to use the `.env` file as a fallback to read the settings. +Ensure you have the WSL extension for VSCode installed. -If using the second method, we suggest adding a copy of the `.env` file under these folders: +## Using uv -- [./tests](./tests) -- [./samples/getting_started](./samples/getting_started). +uv allows us to use SK from the local files, without worrying about paths, as +if you had SK pip package installed. -## System setup +To install SK and all the required tools in your system, first, navigate to the directory containing +this DEV_SETUP using your chosen shell. -To get started, you'll need VSCode and a local installation of Python 3.8+. +### For windows (non-WSL) -You can run: +Check the [uv documentation](https://docs.astral.sh/uv/getting-started/installation/) for the installation instructions. At the time of writing this is the command to install uv: -```python - python3 --version ; pip3 --version ; code -v +```powershell +powershell -c "irm https://astral.sh/uv/install.ps1 | iex" ``` -to verify that you have the required dependencies. 
+You can then run the following commands manually: + +```powershell +# Install Python 3.10, 3.11, and 3.12 +uv python install 3.10 3.11 3.12 +# Create a virtual environment with Python 3.10 (you can change this to 3.11 or 3.12) +$PYTHON_VERSION = "3.10" +uv venv --python $PYTHON_VERSION +# Install SK and all dependencies +uv sync --all-extras --dev +# Install pre-commit hooks +uv run pre-commit install -c python/.pre-commit-config.yaml +``` -## If you're on WSL +Or you can then either install [`make`](https://gnuwin32.sourceforge.net/packages/make.htm) and then follow the guide for Mac and Linux, or run the following commands, the commands are shown as bash but should work in powershell as well. -Check that you've cloned the repository to `~/workspace` or a similar folder. -Avoid `/mnt/c/` and prefer using your WSL user's home directory. +### For Mac and Linux (both native and WSL) -Ensure you have the WSL extension for VSCode installed (and the Python extension -for VSCode installed). - -You'll also need `pip3` installed. If you don't yet have a `python3` install in WSL, -you can run: +It is super simple to get started, run the following commands: ```bash -sudo apt-get update && sudo apt-get install python3 python3-pip +make install ``` -โ„น๏ธ **Note**: if you don't have your PATH setup to find executables installed by `pip3`, -you may need to run `~/.local/bin/poetry install` and `~/.local/bin/poetry shell` -instead. You can fix this by adding `export PATH="$HOME/.local/bin:$PATH"` to -your `~/.bashrc` and closing/re-opening the terminal.\_ - -## Using Poetry - -Poetry allows to use SK from the local files, without worrying about paths, as -if you had SK pip package installed. - -To install Poetry in your system, first, navigate to the directory containing -this README using your chosen shell. You will need to have Python 3.10, 3.11, or 3.12 -installed. +This will install uv, python, Semantic Kernel and all dependencies and the pre-commit config. 
It uses python 3.10 by default, if you want to change that set the `PYTHON_VERSION` environment variable to the desired version (currently supported are 3.10, 3.11, 3.12). For instance for 3.12" + +```bash +make install PYTHON_VERSION=3.12 +``` -Install the Poetry package manager and create a project virtual environment. -Note: SK requires at least Poetry 1.2.0. +If you want to change python version (without installing uv, python and pre-commit), you can use the same parameter, but do: -### Note for MacOS Users +```bash +make install-sk PYTHON_VERSION=3.12 +``` -It is best to install Poetry using their -[official installer](https://python-poetry.org/docs/#installing-with-the-official-installer). +โ„น๏ธ **Note**: Running the install or install-sk command will wipe away your existing virtual environment and create a new one. -On MacOS, you might find that `python` commands are not recognized by default, -and you can only use `python3`. To make it easier to run `python ...` commands -(which Poetry requires), you can create an alias in your shell configuration file. +Alternatively you can run the VSCode task `Python: Install` to run the same command. -Follow these steps: +## VSCode Setup -1. **Open your shell configuration file**: - - For **Bash**: `nano ~/.bash_profile` or `nano ~/.bashrc` - - For **Zsh** (default on macOS Catalina and later): `nano ~/.zshrc` +Open the workspace in [VSCode](https://code.visualstudio.com/docs/editor/workspaces). +> The workspace for python should be rooted in the `./python` folder. -2. **Add the alias**: - ```sh - alias python='python3' - ``` +Open any of the `.py` files in the project and run the `Python: Select Interpreter` +command from the command palette. Make sure the virtual env (default path is `.venv`) created by +`uv` is selected. -3. **Save the file and exit**: - - In `nano`, press `CTRL + X`, then `Y`, and hit `Enter`. +If prompted, install `ruff`. (It should have been installed as part of `uv sync --dev`). -4. 
**Apply the changes**: - - For **Bash**: `source ~/.bash_profile` or `source ~/.bashrc` - - For **Zsh**: `source ~/.zshrc` +You also need to install the `ruff` extension in VSCode so that auto-formatting uses the `ruff` formatter on save. +Read more about the extension [here](https://github.com/astral-sh/ruff-vscode). -After these steps, you should be able to use `python` in your terminal to run -Python 3 commands. +## LLM setup -### Poetry Installation +Make sure you have an +[OpenAI API Key](https://platform.openai.com) or +[Azure OpenAI service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart?pivots=rest-api) -```bash -# Install poetry package if not choosing to install via their official installer -pip3 install poetry +There are two methods to manage keys, secrets, and endpoints: -# optionally, define which python version you want to use -poetry env use python3.11 +1. Store them in environment variables. SK Python leverages pydantic settings to load keys, secrets, and endpoints from the environment. + > When you are using VSCode and have the python extension setup, it automatically loads environment variables from a `.env` file, so you don't have to manually set them in the terminal. + > During runtime on different platforms, environment settings set as part of the deployments should be used. -# Use poetry to install base project dependencies -poetry install +2. Store them in a separate `.env` file, like `dev.env`, you can then pass that name into the constructor for most services, to the `env_file_path` parameter, see below. + > Do not store `*.env` files in your repository, and make sure to add them to your `.gitignore` file. -# If you want to get all dependencies for tests installed, use -# poetry install --with tests -# example: poetry install --with hugging_face +There are a lot of settings, for a more extensive list of settings, see [ALL_SETTINGS.md](./samples/concepts/setup/ALL_SETTINGS.md). 
-# Use poetry to activate project venv -poetry shell +### Example for file-based setup with OpenAI Chat Completions +To configure a `.env` file with just the keys needed for OpenAI Chat Completions, you can create a `openai.env` (this name is just as an example, a single `.env` with all required keys is more common) file in the root of the `python` folder with the following content: -# Optionally, you can install the pre-commit hooks -poetry run pre-commit install -# this will run linters and mypy checks on all the changed code. +Content of `openai.env`: +```env +OPENAI_API_KEY="" +OPENAI_CHAT_MODEL_ID="gpt-4o-mini" ``` -## VSCode Setup - -Open the [workspace](https://code.visualstudio.com/docs/editor/workspaces) in VSCode. -> The Python workspace is the `./python` folder if you are at the root of the repository. +You will then configure the ChatCompletion class with the keyword argument `env_file_path`: -Open any of the `.py` files in the project and run the `Python: Select Interpreter` -command from the command palette. Make sure the virtual env (venv) created by -`poetry` is selected. -The python you're looking for should be under `~/.cache/pypoetry/virtualenvs/semantic-kernel-.../bin/python`. - -If prompted, install `ruff`. (It should have been installed as part of `poetry install`). - -You also need to install the `ruff` extension in VSCode so that auto-formatting uses the `ruff` formatter on save. -Read more about the extension here: https://github.com/astral-sh/ruff-vscode +```python +chat_completion = OpenAIChatCompletion(service_id="test", env_file_path="openai.env") +``` ## Tests You can run the unit tests under the [tests/unit](tests/unit/) folder. ```bash - poetry install --with unit-tests - poetry run pytest tests/unit + uv run pytest tests/unit ``` Alternatively, you can run them using VSCode Tasks. Open the command palette @@ -167,21 +129,18 @@ Alternatively, you can run them using VSCode Tasks. 
Open the command palette You can run the integration tests under the [tests/integration](tests/integration/) folder. ```bash - poetry install --with tests - poetry run pytest tests/integration + uv run pytest tests/integration ``` You can also run all the tests together under the [tests](tests/) folder. ```bash - poetry install - poetry run pytest tests + uv run pytest tests ``` Alternatively, you can run them using VSCode Tasks. Open the command palette (`Ctrl+Shift+P`) and type `Tasks: Run Task`. Select `Python: Tests - All` from the list. -## Tools and scripts ## Implementation Decisions @@ -203,9 +162,9 @@ They should contain: - If necessary to further explain the logic a newline follows the first line and then the explanation is given. - The following three sections are optional, and if used should be separated by a single empty line. - Arguments are then specified after a header called `Args:`, with each argument being specified in the following format: - - `arg_name` (`arg_type`): Explanation of the argument, arg_type is optional, as long as you are consistent. + - `arg_name`: Explanation of the argument. - if a longer explanation is needed for a argument, it should be placed on the next line, indented by 4 spaces. - - Default values do not have to be specified, they will be pulled from the definition. + - Type and default values do not have to be specified, they will be pulled from the definition. - Returns are specified after a header called `Returns:` or `Yields:`, with the return type and explanation of the return value. - Finally, a header for exceptions can be added, called `Raises:`, with each exception being specified in the following format: - `ExceptionType`: Explanation of the exception. @@ -227,12 +186,12 @@ def equal(arg1: str, arg2: str) -> bool: Here is extra explanation of the logic involved. Args: - arg1 (str): The first string to compare. - arg2 (str): The second string to compare. + arg1: The first string to compare. 
+ arg2: The second string to compare. This string requires extra explanation. Returns: - bool: True if the strings are the same, False otherwise. + True if the strings are the same, False otherwise. Raises: ValueError: If one of the strings is empty. @@ -244,11 +203,8 @@ If in doubt, use the link above to read much more considerations of what to do a ## Pydantic and Serialization -[Pydantic Documentation](https://docs.pydantic.dev/1.10/) - -### Overview - This section describes how one can enable serialization for their class using Pydantic. +For more info you can refer to the [Pydantic Documentation](https://docs.pydantic.dev/latest/). ### Upgrading existing classes to use Pydantic @@ -263,7 +219,7 @@ class A: self.d = d ``` -You would convert this to a Pydantic class by subclassing from the `KernelBaseModel` class. +You would convert this to a Pydantic class by sub-classing from the `KernelBaseModel` class. ```python from pydantic import Field @@ -298,10 +254,13 @@ class A: You can use the `KernelBaseModel` to convert these to pydantic serializable classes. 
```python -from typing import Generic +from typing import Generic, TypeVar from semantic_kernel.kernel_pydantic import KernelBaseModel +T1 = TypeVar("T1") +T2 = TypeVar("T2", bound=) + class A(KernelBaseModel, Generic[T1, T2]): # T1 and T2 must be specified in the Generic argument otherwise, pydantic will # NOT be able to serialize this class @@ -310,32 +269,31 @@ class A(KernelBaseModel, Generic[T1, T2]): c: T2 ``` -## Pipeline checks +## Code quality checks -To run the same checks that run during the GitHub Action build, you can use -this command, from the [python](../python) folder: +To run the same checks that run during a commit and the GitHub Action `Python Code Quality Checks`, you can use this command, from the [python](../python) folder: ```bash - poetry run pre-commit run -a + uv run pre-commit run -a ``` or use the following task (using `Ctrl+Shift+P`): - `Python - Run Checks` to run the checks on the whole project. - `Python - Run Checks - Staged` to run the checks on the currently staged files only. -Ideally you should run these checks before committing any changes, use `poetry run pre-commit install` to set that up. +Ideally you should run these checks before committing any changes, when you install using the instructions above the pre-commit hooks should be installed already. ## Code Coverage We try to maintain a high code coverage for the project. To run the code coverage on the unit tests, you can use the following command: ```bash - poetry run pytest --cov=semantic_kernel --cov-report=term-missing:skip-covered tests/unit/ + uv run pytest --cov=semantic_kernel --cov-report=term-missing:skip-covered tests/unit/ ``` or use the following task (using `Ctrl+Shift+P`): - `Python: Tests - Code Coverage` to run the code coverage on the whole project. -This will show you which files are not covered by the tests, including the specific lines not covered. +This will show you which files are not covered by the tests, including the specific lines not covered. 
Make sure to consider the untested lines from the code you are working on, but feel free to add other tests as well, that is always welcome! ## Catching up with the latest changes There are many people committing to Semantic Kernel, so it is important to keep your local repository up to date. To do this, you can run the following commands: diff --git a/python/LICENSE b/python/LICENSE new file mode 100644 index 000000000000..9e841e7a26e4 --- /dev/null +++ b/python/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/python/Makefile b/python/Makefile index 8fdeec500379..1e165a1539ba 100644 --- a/python/Makefile +++ b/python/Makefile @@ -1,54 +1,74 @@ -SHELL = bash +SHELL = /bin/bash -.PHONY: help install recreate-env pre-commit +.PHONY: help install clean build +.SILENT: +all: install -help: - @echo -e "\033[1mUSAGE:\033[0m" - @echo " make [target]" - @echo "" - @echo -e "\033[1mTARGETS:\033[0m" - @echo " install - install Poetry and project dependencies" - @echo " install-pre-commit - install and configure pre-commit hooks" - @echo " pre-commit - run pre-commit hooks on all files" - @echo " recreate-env - destroy and recreate Poetry's virtualenv" +ifeq ($(PYTHON_VERSION),) + PYTHON_VERSION="3.10" +endif .ONESHELL: +help: + echo -e "\033[1mUSAGE:\033[0m" + echo " make [target]" + echo "" + echo -e "\033[1mTARGETS:\033[0m" + echo " help - show this help message" + echo " install - install uv, python, Semantic Kernel and all dependencies" + echo " This is the default and will use Python 3.10." + echo " install-uv - install uv" + echo " install-python - install python distributions" + echo " install-sk - install Semantic Kernel and all dependencies" + echo " install-pre-commit - install pre-commit hooks" + echo " clean - remove the virtualenvs" + echo " build - build the project" + echo "" + echo -e "\033[1mVARIABLES:\033[0m" + echo " PYTHON_VERSION - Python version to use. Default is 3.10" + echo " By default, 3.10, 3.11 and 3.12 are installed as well." + install: - @# Check to make sure Python is installed - @if ! command -v python3 &> /dev/null - then - echo "Python could not be found" - echo "Please install Python" - exit 1 - fi - - @# Check if Poetry is installed - @if ! 
command -v poetry &> /dev/null - then - echo "Poetry could not be found" - echo "Installing Poetry" - curl -sSL https://install.python-poetry.org | python3 - - fi - - # Install the dependencies - poetry install + make install-uv + make install-python + make install-sk + make install-pre-commit + +UV_VERSION = $(shell uv --version 2> /dev/null) +install-uv: +# Check if uv is installed +ifdef UV_VERSION + echo "uv found $(UV_VERSION)" + echo "running uv update" + uv self update +else + echo "uv could not be found" + echo "Installing uv" + curl -LsSf https://astral.sh/uv/install.sh | sh +endif .ONESHELL: -recreate-env: - # Stop the current virtualenv if active or alternative use - # `exit` to exit from a Poetry shell session - (deactivate || exit 0) +install-python: + echo "Installing python 3.10, 3.11, 3.12" + uv python install 3.10 3.11 3.12 - # Remove all the files of the current environment of the folder we are in - export POETRY_LOCATION=$$(poetry env info -p) - echo "Poetry is $${POETRY_LOCATION}" - rm -rf "$${POETRY_LOCATION}" +.ONESHELL: +install-pre-commit: + echo "Installing pre-commit hooks" + uv run pre-commit install -c python/.pre-commit-config.yaml -pre-commit: - poetry run pre-commit run --all-files -c .conf/.pre-commit-config.yaml .ONESHELL: -install-pre-commit: - poetry run pre-commit install - # Edit the pre-commit config file to change the config path - sed -i 's|\.pre-commit-config\.yaml|\.conf/\.pre-commit-config\.yaml|g' .git/hooks/pre-commit +install-sk: + echo "Creating and activating venv for python $(PYTHON_VERSION)" + uv venv --python $(PYTHON_VERSION) + echo "Installing Semantic Kernel and all dependencies" + uv sync --all-extras --dev + +.ONESHELL: +clean: + # Remove the virtualenv + rm -rf .venv + +build: + uvx --from build pyproject-build --installer uv diff --git a/python/README.md b/python/README.md index db821e29dde8..0624f34a032f 100644 --- a/python/README.md +++ b/python/README.md @@ -1,17 +1,18 @@ # Get Started with Semantic 
Kernel โšก Install the latest package: - - python -m pip install --upgrade semantic-kernel - +```bash +python -m pip install --upgrade semantic-kernel +``` If you want to use some of the optional dependencies (OpenAI is installed by default), you can install them with: - - python -m pip install --upgrade semantic-kernel[hugging_face] +```bash +python -m pip install --upgrade semantic-kernel[hugging_face] +``` or all of them: - - python -m pip install --upgrade semantic-kernel[all] - +```bash +python -m pip install --upgrade semantic-kernel[all] +``` # AI Services ## OpenAI / Azure OpenAI API keys @@ -26,7 +27,7 @@ There are two methods to manage keys, secrets, and endpoints: 2. If you'd like to use the `.env` file, you will need to configure the `.env` file with the following keys in the file (see the `.env.example` file): -``` +```bash OPENAI_API_KEY="" OPENAI_ORG_ID="" AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="" diff --git a/python/log.txt b/python/log.txt deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/python/mypy.ini b/python/mypy.ini index 9f392f90a3ab..fae1fd597ab6 100644 --- a/python/mypy.ini +++ b/python/mypy.ini @@ -1,6 +1,5 @@ [mypy] -python_version = 3.11 plugins = pydantic.mypy ignore_missing_imports = true diff --git a/python/poetry.lock b/python/poetry.lock deleted file mode 100644 index 3781e85f22e4..000000000000 --- a/python/poetry.lock +++ /dev/null @@ -1,7715 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
- -[[package]] -name = "accelerate" -version = "0.33.0" -description = "Accelerate" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "accelerate-0.33.0-py3-none-any.whl", hash = "sha256:0a7f33d60ba09afabd028d4f0856dd19c5a734b7a596d637d9dd6e3d0eadbaf3"}, - {file = "accelerate-0.33.0.tar.gz", hash = "sha256:11ba481ed6ea09191775df55ce464aeeba67a024bd0261a44b77b30fb439e26a"}, -] - -[package.dependencies] -huggingface-hub = ">=0.21.0" -numpy = ">=1.17,<2.0.0" -packaging = ">=20.0" -psutil = "*" -pyyaml = "*" -safetensors = ">=0.3.1" -torch = ">=1.10.0" - -[package.extras] -deepspeed = ["deepspeed (<=0.14.0)"] -dev = ["bitsandbytes", "black (>=23.1,<24.0)", "datasets", "diffusers", "evaluate", "hf-doc-builder (>=0.3.0)", "parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "pytest-xdist", "rich", "ruff (>=0.2.1,<0.3.0)", "scikit-learn", "scipy", "timm", "torchpippy (>=0.2.0)", "tqdm", "transformers"] -quality = ["black (>=23.1,<24.0)", "hf-doc-builder (>=0.3.0)", "ruff (>=0.2.1,<0.3.0)"] -rich = ["rich"] -sagemaker = ["sagemaker"] -test-dev = ["bitsandbytes", "datasets", "diffusers", "evaluate", "scikit-learn", "scipy", "timm", "torchpippy (>=0.2.0)", "tqdm", "transformers"] -test-prod = ["parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "pytest-xdist"] -test-trackers = ["comet-ml", "dvclive", "tensorboard", "wandb"] -testing = ["bitsandbytes", "datasets", "diffusers", "evaluate", "parameterized", "pytest (>=7.2.0,<=8.0.0)", "pytest-subtests", "pytest-xdist", "scikit-learn", "scipy", "timm", "torchpippy (>=0.2.0)", "tqdm", "transformers"] - -[[package]] -name = "aiohappyeyeballs" -version = "2.3.5" -description = "Happy Eyeballs for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = 
"sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, -] - -[[package]] -name = "aiohttp" -version = "3.10.5" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, - {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, - {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, - {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, - {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, - {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, - {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, - {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, - {file = 
"aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, - {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, - {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, - {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, - {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, - {file = 
"aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, - {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, - {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, - {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, - {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, - {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, - {file = 
"aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, - {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, - {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, - {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, - {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, - {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, - {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, - {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, - {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, - {file = 
"aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, - {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, - {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, - {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, - {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, - {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, - {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, -] - -[package.dependencies] -aiohappyeyeballs = ">=2.3.0" -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anthropic" -version = "0.32.0" -description = "The official Python library for the anthropic API" 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "anthropic-0.32.0-py3-none-any.whl", hash = "sha256:302c7c652b05a26c418f70697b585d7b47daac36210d097a0daa45ecda89f258"}, - {file = "anthropic-0.32.0.tar.gz", hash = "sha256:1027bddeb7c3cbcb5e16d5e3b4d4a8d17b6258ca2fb4298bf91cc69adb148452"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tokenizers = ">=0.13.0" -typing-extensions = ">=4.7,<5" - -[package.extras] -bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] -vertex = ["google-auth (>=2,<3)"] - -[[package]] -name = "anyio" -version = "4.4.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = 
"sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.8" -files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = 
"attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "authlib" -version = "1.3.1" -description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"}, - {file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"}, -] - -[package.dependencies] -cryptography = "*" - -[[package]] -name = "azure-ai-inference" -version = "1.0.0b3" -description = "Microsoft Azure Ai Inference Client Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-ai-inference-1.0.0b3.tar.gz", hash = "sha256:1e99dc74c3b335a457500311bbbadb348f54dc4c12252a93cb8ab78d6d217ff0"}, - {file = "azure_ai_inference-1.0.0b3-py3-none-any.whl", hash = "sha256:6734ca7334c809a170beb767f1f1455724ab3f006cb60045e42a833c0e764403"}, -] - -[package.dependencies] -azure-core = ">=1.30.0" -isodate = ">=0.6.1" -typing-extensions = ">=4.6.0" - -[[package]] -name = "azure-common" -version = "1.1.28" -description = "Microsoft Azure Client Library for Python (Common)" -optional = false -python-versions = "*" -files = [ - {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, - {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, -] - -[[package]] -name = "azure-core" -version = "1.30.2" -description = "Microsoft Azure Core Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472"}, - {file = "azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a"}, -] - -[package.dependencies] -requests = ">=2.21.0" -six = ">=1.11.0" -typing-extensions = ">=4.6.0" - -[package.extras] -aio = ["aiohttp (>=3.0)"] - -[[package]] -name = "azure-cosmos" -version = "4.7.0" -description 
= "Microsoft Azure Cosmos Client Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-cosmos-4.7.0.tar.gz", hash = "sha256:72d714033134656302a2e8957c4b93590673bd288b0ca60cb123e348ae99a241"}, - {file = "azure_cosmos-4.7.0-py3-none-any.whl", hash = "sha256:03d8c7740ddc2906fb16e07b136acc0fe6a6a02656db46c5dd6f1b127b58cc96"}, -] - -[package.dependencies] -azure-core = ">=1.25.1" -typing-extensions = ">=4.6.0" - -[[package]] -name = "azure-identity" -version = "1.17.1" -description = "Microsoft Azure Identity Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"}, - {file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"}, -] - -[package.dependencies] -azure-core = ">=1.23.0" -cryptography = ">=2.5" -msal = ">=1.24.0" -msal-extensions = ">=0.3.0" -typing-extensions = ">=4.0.0" - -[[package]] -name = "azure-search-documents" -version = "11.6.0b4" -description = "Microsoft Azure Cognitive Search Client Library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure-search-documents-11.6.0b4.tar.gz", hash = "sha256:b09fc3fa2813e83e7177874b352c84462fb86934d9f4299775361e1dfccc3f8f"}, - {file = "azure_search_documents-11.6.0b4-py3-none-any.whl", hash = "sha256:9590392464f882762ce6bad03613c822d4423f09f311c275b833de25398c00c1"}, -] - -[package.dependencies] -azure-common = ">=1.1" -azure-core = ">=1.28.0" -isodate = ">=0.6.0" - -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = 
"sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - -[[package]] -name = "bcrypt" -version = "4.2.0" -description = "Modern password hashing for your software and your servers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, - {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, - {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, - {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, - {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, - {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, - {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, -] - -[package.extras] -tests = ["pytest (>=3.2.1,!=3.3.0)"] -typecheck = ["mypy"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "build" -version = "1.2.1" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">=3.8" -files = [ - {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, - {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} -packaging = ">=19.1" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] -typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] -uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "cachetools" -version = "5.4.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = 
"sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, -] - -[[package]] -name = "certifi" -version = "2024.7.4" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, -] - -[[package]] -name = "cffi" -version = "1.17.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = 
"cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file 
= "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file 
= "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = 
"cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = 
"sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - 
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = 
"charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "cheap-repr" -version = "0.5.2" -description = "Better version of repr/reprlib for short, cheap string representations." -optional = false -python-versions = "*" -files = [ - {file = "cheap_repr-0.5.2-py2.py3-none-any.whl", hash = "sha256:537ec1991bfee885c13c6d473afd110a408e039cde26882e95bf92761556ab6e"}, - {file = "cheap_repr-0.5.2.tar.gz", hash = "sha256:001a5cf8adb0305c7ad3152c5f776040ac2a559d97f85770cebcb28c6ca5a30f"}, -] - -[package.extras] -tests = ["Django", "numpy (>=1.16.3)", "pandas (>=0.24.2)", "pytest"] - -[[package]] -name = "chroma-hnswlib" -version = "0.7.6" -description = "Chromas fork of hnswlib" -optional = false -python-versions = "*" -files = [ - {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f35192fbbeadc8c0633f0a69c3d3e9f1a4eab3a46b65458bbcbcabdd9e895c36"}, - {file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f007b608c96362b8f0c8b6b2ac94f67f83fcbabd857c378ae82007ec92f4d82"}, - {file = "chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:456fd88fa0d14e6b385358515aef69fc89b3c2191706fd9aee62087b62aad09c"}, - {file = "chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dfaae825499c2beaa3b75a12d7ec713b64226df72a5c4097203e3ed532680da"}, - {file = "chroma_hnswlib-0.7.6-cp310-cp310-win_amd64.whl", hash = "sha256:2487201982241fb1581be26524145092c95902cb09fc2646ccfbc407de3328ec"}, - {file = "chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca"}, - {file = "chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f"}, - {file = 
"chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170"}, - {file = "chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9"}, - {file = "chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3"}, - {file = "chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7"}, - {file = "chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912"}, - {file = "chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4"}, - {file = "chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5"}, - {file = "chroma_hnswlib-0.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2fe6ea949047beed19a94b33f41fe882a691e58b70c55fdaa90274ae78be046f"}, - {file = "chroma_hnswlib-0.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feceff971e2a2728c9ddd862a9dd6eb9f638377ad98438876c9aeac96c9482f5"}, - {file = "chroma_hnswlib-0.7.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb0633b60e00a2b92314d0bf5bbc0da3d3320be72c7e3f4a9b19f4609dc2b2ab"}, - {file = "chroma_hnswlib-0.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a566abe32fab42291f766d667bdbfa234a7f457dcbd2ba19948b7a978c8ca624"}, - {file = "chroma_hnswlib-0.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6be47853d9a58dedcfa90fc846af202b071f028bbafe1d8711bf64fe5a7f6111"}, - {file = 
"chroma_hnswlib-0.7.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a7af35bdd39a88bffa49f9bb4bf4f9040b684514a024435a1ef5cdff980579d"}, - {file = "chroma_hnswlib-0.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a53b1f1551f2b5ad94eb610207bde1bb476245fc5097a2bec2b476c653c58bde"}, - {file = "chroma_hnswlib-0.7.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3085402958dbdc9ff5626ae58d696948e715aef88c86d1e3f9285a88f1afd3bc"}, - {file = "chroma_hnswlib-0.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:77326f658a15adfb806a16543f7db7c45f06fd787d699e643642d6bde8ed49c4"}, - {file = "chroma_hnswlib-0.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93b056ab4e25adab861dfef21e1d2a2756b18be5bc9c292aa252fa12bb44e6ae"}, - {file = "chroma_hnswlib-0.7.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fe91f018b30452c16c811fd6c8ede01f84e5a9f3c23e0758775e57f1c3778871"}, - {file = "chroma_hnswlib-0.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c0e627476f0f4d9e153420d36042dd9c6c3671cfd1fe511c0253e38c2a1039"}, - {file = "chroma_hnswlib-0.7.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e9796a4536b7de6c6d76a792ba03e08f5aaa53e97e052709568e50b4d20c04f"}, - {file = "chroma_hnswlib-0.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:d30e2db08e7ffdcc415bd072883a322de5995eb6ec28a8f8c054103bbd3ec1e0"}, - {file = "chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7"}, -] - -[package.dependencies] -numpy = "*" - -[[package]] -name = "chromadb" -version = "0.5.5" -description = "Chroma." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "chromadb-0.5.5-py3-none-any.whl", hash = "sha256:2a5a4b84cb0fc32b380e193be68cdbadf3d9f77dbbf141649be9886e42910ddd"}, - {file = "chromadb-0.5.5.tar.gz", hash = "sha256:84f4bfee320fb4912cbeb4d738f01690891e9894f0ba81f39ee02867102a1c4d"}, -] - -[package.dependencies] -bcrypt = ">=4.0.1" -build = ">=1.0.3" -chroma-hnswlib = "0.7.6" -fastapi = ">=0.95.2" -grpcio = ">=1.58.0" -httpx = ">=0.27.0" -importlib-resources = "*" -kubernetes = ">=28.1.0" -mmh3 = ">=4.0.1" -numpy = ">=1.22.5,<2.0.0" -onnxruntime = ">=1.14.1" -opentelemetry-api = ">=1.2.0" -opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" -opentelemetry-instrumentation-fastapi = ">=0.41b0" -opentelemetry-sdk = ">=1.2.0" -orjson = ">=3.9.12" -overrides = ">=7.3.1" -posthog = ">=2.4.0" -pydantic = ">=1.9" -pypika = ">=0.48.9" -PyYAML = ">=6.0.0" -tenacity = ">=8.2.3" -tokenizers = ">=0.13.2" -tqdm = ">=4.65.0" -typer = ">=0.9.0" -typing-extensions = ">=4.5.0" -uvicorn = {version = ">=0.18.3", extras = ["standard"]} - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "coloredlogs" -version = "15.0.1" -description = "Colored terminal output for Python's logging module" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, - {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, -] - -[package.dependencies] -humanfriendly = ">=9.1" - -[package.extras] -cron = ["capturer (>=2.4)"] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "coverage" -version = "7.6.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - 
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = 
"sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = 
"sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "cryptography" -version = "43.0.0" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = 
"cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "debugpy" -version = "1.8.5" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash 
= "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = 
"sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - -[[package]] -name = "distlib" -version = "0.3.8" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, -] - -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx 
(>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "docstring-parser" -version = "0.16" -description = "Parse Python docstrings in reST, Google and Numpydoc format" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, - {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, -] - -[[package]] -name = "environs" -version = "9.5.0" -description = "simplified environment variable parsing" -optional = false -python-versions = ">=3.6" -files = [ - {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, - {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, -] - -[package.dependencies] -marshmallow = ">=3.0.0" -python-dotenv = "*" - -[package.extras] -dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] -tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - 
-[[package]] -name = "execnet" -version = "2.1.1" -description = "execnet: rapid multi-Python deployment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, - {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, -] - -[package.extras] -testing = ["hatch", "pre-commit", "pytest", "tox"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastapi" -version = "0.112.0" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.112.0-py3-none-any.whl", hash = "sha256:3487ded9778006a45834b8c816ec4a48d522e2631ca9e75ec5a774f1b052f821"}, - {file = "fastapi-0.112.0.tar.gz", hash = "sha256:d262bc56b7d101d1f4e8fc0ad2ac75bb9935fec504d2b7117686cec50710cf05"}, -] - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" - -[package.extras] -all = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart 
(>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -standard = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastjsonschema" -version = "2.20.0" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.15.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "flatbuffers" -version = "24.3.25" -description = "The FlatBuffers serialization format for Python" -optional = false -python-versions = "*" -files = [ - {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, - {file = 
"flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - 
{file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = 
"frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = 
"frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.6.1" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -dev = ["pre-commit", "ruff"] -doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -test = ["aiohttp 
(!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] -test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] -tqdm = ["tqdm"] - -[[package]] -name = "google-ai-generativelanguage" -version = "0.6.6" -description = "Google Ai Generativelanguage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-ai-generativelanguage-0.6.6.tar.gz", hash = "sha256:1739f035caeeeca5c28f887405eec8690f3372daf79fecf26454a97a4f1733a8"}, - {file = "google_ai_generativelanguage-0.6.6-py3-none-any.whl", hash = "sha256:59297737931f073d55ce1268dcc6d95111ee62850349d2b6cde942b16a4fca5c"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-api-core" -version = "2.19.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = 
"sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-api-python-client" -version = "2.140.0" -description = "Google API Client Library for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_api_python_client-2.140.0-py2.py3-none-any.whl", hash = "sha256:aeb4bb99e9fdd241473da5ff35464a0658fea0db76fe89c0f8c77ecfc3813404"}, - {file = "google_api_python_client-2.140.0.tar.gz", hash = "sha256:0bb973adccbe66a3d0a70abe4e49b3f2f004d849416bfec38d22b75649d389d8"}, -] - -[package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" -google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" -google-auth-httplib2 = 
">=0.2.0,<1.0.0" -httplib2 = ">=0.19.0,<1.dev0" -uritemplate = ">=3.0.1,<5" - -[[package]] -name = "google-auth" -version = "2.33.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_auth-2.33.0-py2.py3-none-any.whl", hash = "sha256:8eff47d0d4a34ab6265c50a106a3362de6a9975bb08998700e389f857e4d39df"}, - {file = "google_auth-2.33.0.tar.gz", hash = "sha256:d6a52342160d7290e334b4d47ba390767e4438ad0d45b7630774533e82655b95"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-auth-httplib2" -version = "0.2.0" -description = "Google Authentication Library: httplib2 transport" -optional = false -python-versions = "*" -files = [ - {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, - {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, -] - -[package.dependencies] -google-auth = "*" -httplib2 = ">=0.19.0" - -[[package]] -name = "google-cloud-aiplatform" -version = "1.62.0" -description = "Vertex AI API client library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "google-cloud-aiplatform-1.62.0.tar.gz", hash = "sha256:e15d5b2a99e30d4a16f4c51cfb8129962e6da41a9027d2ea696abe0e2f006fe8"}, - {file = "google_cloud_aiplatform-1.62.0-py2.py3-none-any.whl", hash = "sha256:d7738e0fd4494a54ae08a51755a2143d58937cba2db826189771f45566c9ee3c"}, -] - -[package.dependencies] -docstring-parser = "<1" -google-api-core = {version = ">=1.34.1,<2.0.dev0 
|| >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" -google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" -google-cloud-storage = ">=1.32.0,<3.0.0dev" -packaging = ">=14.3" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" -pydantic = "<3" -shapely = "<3.0.0dev" - -[package.extras] -autologging = ["mlflow (>=1.27.0,<=2.1.1)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] -endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] -langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "orjson (<=3.10.6)", "tenacity (<=8.3)"] -langchain-testing = 
["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "orjson (<=3.10.6)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] -lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] -metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (>=5.3.1,<7)"] -prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] -preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] -private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] -reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", 
"explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] -tokenization = ["sentencepiece (>=0.2.0)"] -vizier = ["google-vizier (>=0.1.6)"] -xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.25.0" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = 
">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = 
"sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-resource-manager" -version = "1.12.5" -description = "Google Cloud Resource Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, - {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "google-cloud-storage" -version = "2.18.2" -description = "Google Cloud Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166"}, - {file = "google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99"}, -] - -[package.dependencies] -google-api-core = ">=2.15.0,<3.0.0dev" -google-auth = ">=2.26.1,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.7.2" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<6.0.0dev)"] -tracing = ["opentelemetry-api 
(>=1.1.0)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = 
"google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = 
"google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-generativeai" -version = "0.7.2" -description = "Google Generative AI High level API client library and tools." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "google_generativeai-0.7.2-py3-none-any.whl", hash = "sha256:3117d1ebc92ee77710d4bc25ab4763492fddce9b6332eb25d124cf5d8b78b339"}, -] - -[package.dependencies] -google-ai-generativelanguage = "0.6.6" -google-api-core = "*" -google-api-python-client = "*" -google-auth = ">=2.15.0" -protobuf = "*" -pydantic = "*" -tqdm = "*" -typing-extensions = "*" - -[package.extras] -dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] - -[[package]] -name = "google-resumable-media" -version = "2.7.2" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, - {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.63.2" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] 
-name = "grpc-google-iam-v1" -version = "0.13.1" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, - {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "grpcio" -version = "1.63.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, - {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b3afbd9d6827fa6f475a4f91db55e441113f6d3eb9b7ebb8fb806e5bb6d6bd0d"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f3f6883ce54a7a5f47db43289a0a4c776487912de1a0e2cc83fdaec9685cc9f"}, - {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8dae9cc0412cb86c8de5a8f3be395c5119a370f3ce2e69c8b7d46bb9872c8d"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e1559fd3b3b4468486b26b0af64a3904a8dbc78d8d936af9c1cf9636eb3e8b"}, - {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c039ef01516039fa39da8a8a43a95b64e288f79f42a17e6c2904a02a319b357"}, - {file = "grpcio-1.63.0-cp310-cp310-win32.whl", hash = 
"sha256:ad2ac8903b2eae071055a927ef74121ed52d69468e91d9bcbd028bd0e554be6d"}, - {file = "grpcio-1.63.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2e44f59316716532a993ca2966636df6fbe7be4ab6f099de6815570ebe4383a"}, - {file = "grpcio-1.63.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f28f8b2db7b86c77916829d64ab21ff49a9d8289ea1564a2b2a3a8ed9ffcccd3"}, - {file = "grpcio-1.63.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65bf975639a1f93bee63ca60d2e4951f1b543f498d581869922910a476ead2f5"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b5194775fec7dc3dbd6a935102bb156cd2c35efe1685b0a46c67b927c74f0cfb"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4cbb2100ee46d024c45920d16e888ee5d3cf47c66e316210bc236d5bebc42b3"}, - {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff737cf29b5b801619f10e59b581869e32f400159e8b12d7a97e7e3bdeee6a2"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd1e68776262dd44dedd7381b1a0ad09d9930ffb405f737d64f505eb7f77d6c7"}, - {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f45f27f516548e23e4ec3fbab21b060416007dbe768a111fc4611464cc773f"}, - {file = "grpcio-1.63.0-cp311-cp311-win32.whl", hash = "sha256:878b1d88d0137df60e6b09b74cdb73db123f9579232c8456f53e9abc4f62eb3c"}, - {file = "grpcio-1.63.0-cp311-cp311-win_amd64.whl", hash = "sha256:756fed02dacd24e8f488f295a913f250b56b98fb793f41d5b2de6c44fb762434"}, - {file = "grpcio-1.63.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:93a46794cc96c3a674cdfb59ef9ce84d46185fe9421baf2268ccb556f8f81f57"}, - {file = "grpcio-1.63.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a7b19dfc74d0be7032ca1eda0ed545e582ee46cd65c162f9e9fc6b26ef827dc6"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8064d986d3a64ba21e498b9a376cbc5d6ab2e8ab0e288d39f266f0fca169b90d"}, - {file = 
"grpcio-1.63.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:219bb1848cd2c90348c79ed0a6b0ea51866bc7e72fa6e205e459fedab5770172"}, - {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d60cd1d58817bc5985fae6168d8b5655c4981d448d0f5b6194bbcc038090d2"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e350cb096e5c67832e9b6e018cf8a0d2a53b2a958f6251615173165269a91b0"}, - {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:56cdf96ff82e3cc90dbe8bac260352993f23e8e256e063c327b6cf9c88daf7a9"}, - {file = "grpcio-1.63.0-cp312-cp312-win32.whl", hash = "sha256:3a6d1f9ea965e750db7b4ee6f9fdef5fdf135abe8a249e75d84b0a3e0c668a1b"}, - {file = "grpcio-1.63.0-cp312-cp312-win_amd64.whl", hash = "sha256:d2497769895bb03efe3187fb1888fc20e98a5f18b3d14b606167dacda5789434"}, - {file = "grpcio-1.63.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fdf348ae69c6ff484402cfdb14e18c1b0054ac2420079d575c53a60b9b2853ae"}, - {file = "grpcio-1.63.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a3abfe0b0f6798dedd2e9e92e881d9acd0fdb62ae27dcbbfa7654a57e24060c0"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6ef0ad92873672a2a3767cb827b64741c363ebaa27e7f21659e4e31f4d750280"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b416252ac5588d9dfb8a30a191451adbf534e9ce5f56bb02cd193f12d8845b7f"}, - {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b77eaefc74d7eb861d3ffbdf91b50a1bb1639514ebe764c47773b833fa2d91"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b005292369d9c1f80bf70c1db1c17c6c342da7576f1c689e8eee4fb0c256af85"}, - {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdcda1156dcc41e042d1e899ba1f5c2e9f3cd7625b3d6ebfa619806a4c1aadda"}, - {file = "grpcio-1.63.0-cp38-cp38-win32.whl", hash = 
"sha256:01799e8649f9e94ba7db1aeb3452188048b0019dc37696b0f5ce212c87c560c3"}, - {file = "grpcio-1.63.0-cp38-cp38-win_amd64.whl", hash = "sha256:6a1a3642d76f887aa4009d92f71eb37809abceb3b7b5a1eec9c554a246f20e3a"}, - {file = "grpcio-1.63.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:75f701ff645858a2b16bc8c9fc68af215a8bb2d5a9b647448129de6e85d52bce"}, - {file = "grpcio-1.63.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cacdef0348a08e475a721967f48206a2254a1b26ee7637638d9e081761a5ba86"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0697563d1d84d6985e40ec5ec596ff41b52abb3fd91ec240e8cb44a63b895094"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6426e1fb92d006e47476d42b8f240c1d916a6d4423c5258ccc5b105e43438f61"}, - {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48cee31bc5f5a31fb2f3b573764bd563aaa5472342860edcc7039525b53e46a"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50344663068041b34a992c19c600236e7abb42d6ec32567916b87b4c8b8833b3"}, - {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:259e11932230d70ef24a21b9fb5bb947eb4703f57865a404054400ee92f42f5d"}, - {file = "grpcio-1.63.0-cp39-cp39-win32.whl", hash = "sha256:a44624aad77bf8ca198c55af811fd28f2b3eaf0a50ec5b57b06c034416ef2d0a"}, - {file = "grpcio-1.63.0-cp39-cp39-win_amd64.whl", hash = "sha256:166e5c460e5d7d4656ff9e63b13e1f6029b122104c1633d5f37eaea348d7356d"}, - {file = "grpcio-1.63.0.tar.gz", hash = "sha256:f3023e14805c61bc439fb40ca545ac3d5740ce66120a678a3c6c2c55b70343d1"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.63.0)"] - -[[package]] -name = "grpcio-health-checking" -version = "1.62.3" -description = "Standard Health Checking Service for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-health-checking-1.62.3.tar.gz", hash = 
"sha256:5074ba0ce8f0dcfe328408ec5c7551b2a835720ffd9b69dade7fa3e0dc1c7a93"}, - {file = "grpcio_health_checking-1.62.3-py3-none-any.whl", hash = "sha256:f29da7dd144d73b4465fe48f011a91453e9ff6c8af0d449254cf80021cab3e0d"}, -] - -[package.dependencies] -grpcio = ">=1.62.3" -protobuf = ">=4.21.6" - -[[package]] -name = "grpcio-status" -version = "1.62.3" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"}, - {file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.3" -protobuf = ">=4.21.6" - -[[package]] -name = "grpcio-tools" -version = "1.62.3" -description = "Protobuf code generator for gRPC" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0a8c0c4724ae9c2181b7dbc9b186df46e4f62cb18dc184e46d06c0ebeccf569e"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5782883a27d3fae8c425b29a9d3dcf5f47d992848a1b76970da3b5a28d424b26"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d812daffd0c2d2794756bd45a353f89e55dc8f91eb2fc840c51b9f6be62667"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b47d0dda1bdb0a0ba7a9a6de88e5a1ed61f07fad613964879954961e36d49193"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ca246dffeca0498be9b4e1ee169b62e64694b0f92e6d0be2573e65522f39eea9"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win32.whl", hash = "sha256:6a56d344b0bab30bf342a67e33d386b0b3c4e65868ffe93c341c51e1a8853ca5"}, - {file = "grpcio_tools-1.62.3-cp310-cp310-win_amd64.whl", hash = "sha256:710fecf6a171dcbfa263a0a3e7070e0df65ba73158d4c539cec50978f11dad5d"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5"}, - {file = "grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b"}, - {file = "grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:ec6fbded0c61afe6f84e3c2a43e6d656791d95747d6d28b73eff1af64108c434"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:bfda6ee8990997a9df95c5606f3096dae65f09af7ca03a1e9ca28f088caca5cf"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b77f9f9cee87cd798f0fe26b7024344d1b03a7cd2d2cba7035f8433b13986325"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e02d3b96f2d0e4bab9ceaa30f37d4f75571e40c6272e95364bff3125a64d184"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1da38070738da53556a4b35ab67c1b9884a5dd48fa2f243db35dc14079ea3d0c"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ace43b26d88a58dcff16c20d23ff72b04d0a415f64d2820f4ff06b1166f50557"}, - {file = "grpcio_tools-1.62.3-cp37-cp37m-win_amd64.whl", hash = "sha256:350a80485e302daaa95d335a931f97b693e170e02d43767ab06552c708808950"}, - {file = 
"grpcio_tools-1.62.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:c3a1ac9d394f8e229eb28eec2e04b9a6f5433fa19c9d32f1cb6066e3c5114a1d"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:11f363570dea661dde99e04a51bd108a5807b5df32a6f8bdf4860e34e94a4dbf"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9ad9950119d8ae27634e68b7663cc8d340ae535a0f80d85a55e56a6973ab1f"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c5d22b252dcef11dd1e0fbbe5bbfb9b4ae048e8880d33338215e8ccbdb03edc"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:27cd9ef5c5d68d5ed104b6dcb96fe9c66b82050e546c9e255716903c3d8f0373"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f4b1615adf67bd8bb71f3464146a6f9949972d06d21a4f5e87e73f6464d97f57"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win32.whl", hash = "sha256:e18e15287c31baf574fcdf8251fb7f997d64e96c6ecf467906e576da0a079af6"}, - {file = "grpcio_tools-1.62.3-cp38-cp38-win_amd64.whl", hash = "sha256:6c3064610826f50bd69410c63101954676edc703e03f9e8f978a135f1aaf97c1"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:8e62cc7164b0b7c5128e637e394eb2ef3db0e61fc798e80c301de3b2379203ed"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c8ad5cce554e2fcaf8842dee5d9462583b601a3a78f8b76a153c38c963f58c10"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec279dcf3518201fc592c65002754f58a6b542798cd7f3ecd4af086422f33f29"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c989246c2aebc13253f08be32538a4039a64e12d9c18f6d662d7aee641dc8b5"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:ca4f5eeadbb57cf03317d6a2857823239a63a59cc935f5bd6cf6e8b7af7a7ecc"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0cb3a3436ac119cbd37a7d3331d9bdf85dad21a6ac233a3411dff716dcbf401e"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win32.whl", hash = "sha256:3eae6ea76d62fcac091e1f15c2dcedf1dc3f114f8df1a972a8a0745e89f4cf61"}, - {file = "grpcio_tools-1.62.3-cp39-cp39-win_amd64.whl", hash = "sha256:eec73a005443061f4759b71a056f745e3b000dc0dc125c9f20560232dfbcbd14"}, -] - -[package.dependencies] -grpcio = ">=1.62.3" -protobuf = ">=4.21.6,<5.0dev" -setuptools = "*" - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "h2" -version = "4.1.0" -description = "HTTP/2 State-Machine based protocol implementation" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] - -[package.dependencies] -hpack = ">=4.0,<5" -hyperframe = ">=6.0,<7" - -[[package]] -name = "hiredis" -version = "3.0.0" -description = "Python wrapper for hiredis" -optional = false -python-versions = ">=3.8" -files = [ - {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:4b182791c41c5eb1d9ed736f0ff81694b06937ca14b0d4dadde5dadba7ff6dae"}, - {file = "hiredis-3.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:13c275b483a052dd645eb2cb60d6380f1f5215e4c22d6207e17b86be6dd87ffa"}, - {file = 
"hiredis-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1018cc7f12824506f165027eabb302735b49e63af73eb4d5450c66c88f47026"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83a29cc7b21b746cb6a480189e49f49b2072812c445e66a9e38d2004d496b81c"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e241fab6332e8fb5f14af00a4a9c6aefa22f19a336c069b7ddbf28ef8341e8d6"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fb8de899f0145d6c4d5d4bd0ee88a78eb980a7ffabd51e9889251b8f58f1785"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b23291951959141173eec10f8573538e9349fa27f47a0c34323d1970bf891ee5"}, - {file = "hiredis-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e421ac9e4b5efc11705a0d5149e641d4defdc07077f748667f359e60dc904420"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77c8006c12154c37691b24ff293c077300c22944018c3ff70094a33e10c1d795"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:41afc0d3c18b59eb50970479a9c0e5544fb4b95e3a79cf2fbaece6ddefb926fe"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:04ccae6dcd9647eae6025425ab64edb4d79fde8b9e6e115ebfabc6830170e3b2"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fe91d62b0594db5ea7d23fc2192182b1a7b6973f628a9b8b2e0a42a2be721ac6"}, - {file = "hiredis-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99516d99316062824a24d145d694f5b0d030c80da693ea6f8c4ecf71a251d8bb"}, - {file = "hiredis-3.0.0-cp310-cp310-win32.whl", hash = "sha256:562eaf820de045eb487afaa37e6293fe7eceb5b25e158b5a1974b7e40bf04543"}, - {file = "hiredis-3.0.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:a1c81c89ed765198da27412aa21478f30d54ef69bf5e4480089d9c3f77b8f882"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:4664dedcd5933364756d7251a7ea86d60246ccf73a2e00912872dacbfcef8978"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:47de0bbccf4c8a9f99d82d225f7672b9dd690d8fd872007b933ef51a302c9fa6"}, - {file = "hiredis-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e43679eca508ba8240d016d8cca9d27342d70184773c15bea78a23c87a1922f1"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13c345e7278c210317e77e1934b27b61394fee0dec2e8bd47e71570900f75823"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00018f22f38530768b73ea86c11f47e8d4df65facd4e562bd78773bd1baef35e"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ea3a86405baa8eb0d3639ced6926ad03e07113de54cb00fd7510cb0db76a89d"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c073848d2b1d5561f3903879ccf4e1a70c9b1e7566c7bdcc98d082fa3e7f0a1d"}, - {file = "hiredis-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a8dffb5f5b3415a4669d25de48b617fd9d44b0bccfc4c2ab24b06406ecc9ecb"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:22c17c96143c2a62dfd61b13803bc5de2ac526b8768d2141c018b965d0333b66"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c3ece960008dab66c6b8bb3a1350764677ee7c74ccd6270aaf1b1caf9ccebb46"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f75999ae00a920f7dce6ecae76fa5e8674a3110e5a75f12c7a2c75ae1af53396"}, - {file = "hiredis-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e069967cbd5e1900aafc4b5943888f6d34937fc59bf8918a1a546cb729b4b1e4"}, - {file = 
"hiredis-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0aacc0a78e1d94d843a6d191f224a35893e6bdfeb77a4a89264155015c65f126"}, - {file = "hiredis-3.0.0-cp311-cp311-win32.whl", hash = "sha256:719c32147ba29528cb451f037bf837dcdda4ff3ddb6cdb12c4216b0973174718"}, - {file = "hiredis-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:bdc144d56333c52c853c31b4e2e52cfbdb22d3da4374c00f5f3d67c42158970f"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:484025d2eb8f6348f7876fc5a2ee742f568915039fcb31b478fd5c242bb0fe3a"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fcdb552ffd97151dab8e7bc3ab556dfa1512556b48a367db94b5c20253a35ee1"}, - {file = "hiredis-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bb6f9fd92f147ba11d338ef5c68af4fd2908739c09e51f186e1d90958c68cc1"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa86bf9a0ed339ec9e8a9a9d0ae4dccd8671625c83f9f9f2640729b15e07fbfd"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e194a0d5df9456995d8f510eab9f529213e7326af6b94770abf8f8b7952ddcaa"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a1df39d74ec507d79c7a82c8063eee60bf80537cdeee652f576059b9cdd15c"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f91456507427ba36fd81b2ca11053a8e112c775325acc74e993201ea912d63e9"}, - {file = "hiredis-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9862db92ef67a8a02e0d5370f07d380e14577ecb281b79720e0d7a89aedb9ee5"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d10fcd9e0eeab835f492832b2a6edb5940e2f1230155f33006a8dfd3bd2c94e4"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:48727d7d405d03977d01885f317328dc21d639096308de126c2c4e9950cbd3c9"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e0bb6102ebe2efecf8a3292c6660a0e6fac98176af6de67f020bea1c2343717"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:df274e3abb4df40f4c7274dd3e587dfbb25691826c948bc98d5fead019dfb001"}, - {file = "hiredis-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:034925b5fb514f7b11aac38cd55b3fd7e9d3af23bd6497f3f20aa5b8ba58e232"}, - {file = "hiredis-3.0.0-cp312-cp312-win32.whl", hash = "sha256:120f2dda469b28d12ccff7c2230225162e174657b49cf4cd119db525414ae281"}, - {file = "hiredis-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e584fe5f4e6681d8762982be055f1534e0170f6308a7a90f58d737bab12ff6a8"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:122171ff47d96ed8dd4bba6c0e41d8afaba3e8194949f7720431a62aa29d8895"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ba9fc605ac558f0de67463fb588722878641e6fa1dabcda979e8e69ff581d0bd"}, - {file = "hiredis-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a631e2990b8be23178f655cae8ac6c7422af478c420dd54e25f2e26c29e766f1"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63482db3fadebadc1d01ad33afa6045ebe2ea528eb77ccaabd33ee7d9c2bad48"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f669212c390eebfbe03c4e20181f5970b82c5d0a0ad1df1785f7ffbe7d61150"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a49ef161739f8018c69b371528bdb47d7342edfdee9ddc75a4d8caddf45a6e"}, - {file = "hiredis-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98a152052b8878e5e43a2e3a14075218adafc759547c98668a21e9485882696c"}, - {file = 
"hiredis-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50a196af0ce657fcde9bf8a0bbe1032e22c64d8fcec2bc926a35e7ff68b3a166"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2f312eef8aafc2255e3585dcf94d5da116c43ef837db91db9ecdc1bc930072d"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6ca41fa40fa019cde42c21add74aadd775e71458051a15a352eabeb12eb4d084"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6eecb343c70629f5af55a8b3e53264e44fa04e155ef7989de13668a0cb102a90"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c3fdad75e7837a475900a1d3a5cc09aa024293c3b0605155da2d42f41bc0e482"}, - {file = "hiredis-3.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8854969e7480e8d61ed7549eb232d95082a743e94138d98d7222ba4e9f7ecacd"}, - {file = "hiredis-3.0.0-cp38-cp38-win32.whl", hash = "sha256:f114a6c86edbf17554672b050cce72abf489fe58d583c7921904d5f1c9691605"}, - {file = "hiredis-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:7d99b91e42217d7b4b63354b15b41ce960e27d216783e04c4a350224d55842a4"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:4c6efcbb5687cf8d2aedcc2c3ed4ac6feae90b8547427d417111194873b66b06"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5b5cff42a522a0d81c2ae7eae5e56d0ee7365e0c4ad50c4de467d8957aff4414"}, - {file = "hiredis-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:82f794d564f4bc76b80c50b03267fe5d6589e93f08e66b7a2f674faa2fa76ebc"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a4c1791d7aa7e192f60fe028ae409f18ccdd540f8b1e6aeb0df7816c77e4a4"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2537b2cd98192323fce4244c8edbf11f3cac548a9d633dbbb12b48702f379f4"}, - {file = 
"hiredis-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fed69bbaa307040c62195a269f82fc3edf46b510a17abb6b30a15d7dab548df"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:869f6d5537d243080f44253491bb30aa1ec3c21754003b3bddeadedeb65842b0"}, - {file = "hiredis-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d435ae89073d7cd51e6b6bf78369c412216261c9c01662e7008ff00978153729"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:204b79b30a0e6be0dc2301a4d385bb61472809f09c49f400497f1cdd5a165c66"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ea635101b739c12effd189cc19b2671c268abb03013fd1f6321ca29df3ca625"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f359175197fd833c8dd7a8c288f1516be45415bb5c939862ab60c2918e1e1943"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ac6d929cb33dd12ad3424b75725975f0a54b5b12dbff95f2a2d660c510aa106d"}, - {file = "hiredis-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:100431e04d25a522ef2c3b94f294c4219c4de3bfc7d557b6253296145a144c11"}, - {file = "hiredis-3.0.0-cp39-cp39-win32.whl", hash = "sha256:e1a9c14ae9573d172dc050a6f63a644457df5d01ec4d35a6a0f097f812930f83"}, - {file = "hiredis-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:54a6dd7b478e6eb01ce15b3bb5bf771e108c6c148315bf194eb2ab776a3cac4d"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50da7a9edf371441dfcc56288d790985ee9840d982750580710a9789b8f4a290"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b285ef6bf1581310b0d5e8f6ce64f790a1c40e89c660e1320b35f7515433672"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcfa684966f25b335072115de2f920228a3c2caf79d4bfa2b30f6e4f674a948"}, - {file = 
"hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a41be8af1fd78ca97bc948d789a09b730d1e7587d07ca53af05758f31f4b985d"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038756db735e417ab36ee6fd7725ce412385ed2bd0767e8179a4755ea11b804f"}, - {file = "hiredis-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fcecbd39bd42cef905c0b51c9689c39d0cc8b88b1671e7f40d4fb213423aef3a"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a131377493a59fb0f5eaeb2afd49c6540cafcfba5b0b3752bed707be9e7c4eaf"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d22c53f0ec5c18ecb3d92aa9420563b1c5d657d53f01356114978107b00b860"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a91e9520fbc65a799943e5c970ffbcd67905744d8becf2e75f9f0a5e8414f0"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dc8043959b50141df58ab4f398e8ae84c6f9e673a2c9407be65fc789138f4a6"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51b99cfac514173d7b8abdfe10338193e8a0eccdfe1870b646009d2fb7cbe4b5"}, - {file = "hiredis-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:fa1fcad89d8a41d8dc10b1e54951ec1e161deabd84ed5a2c95c3c7213bdb3514"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:898636a06d9bf575d2c594129085ad6b713414038276a4bfc5db7646b8a5be78"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:466f836dbcf86de3f9692097a7a01533dc9926986022c6617dc364a402b265c5"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23142a8af92a13fc1e3f2ca1d940df3dcf2af1d176be41fe8d89e30a837a0b60"}, - {file = 
"hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:793c80a3d6b0b0e8196a2d5de37a08330125668c8012922685e17aa9108c33ac"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:467d28112c7faa29b7db743f40803d927c8591e9da02b6ce3d5fadc170a542a2"}, - {file = "hiredis-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dc384874a719c767b50a30750f937af18842ee5e288afba95a5a3ed703b1515a"}, - {file = "hiredis-3.0.0.tar.gz", hash = "sha256:fed8581ae26345dea1f1e0d1a96e05041a727a45e7d8d459164583e23c6ac441"}, -] - -[[package]] -name = "hpack" -version = "4.0.0" -description = "Pure-Python HPACK header compression" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httplib2" -version = "0.22.0" -description = "A comprehensive HTTP client library." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, - {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, -] - -[package.dependencies] -pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = 
"httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "huggingface-hub" -version = "0.24.5" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.24.5-py3-none-any.whl", hash = "sha256:d93fb63b1f1a919a22ce91a14518974e81fc4610bf344dfe7572343ce8d3aced"}, - {file = "huggingface_hub-0.24.5.tar.gz", hash = "sha256:7b45d6744dd53ce9cbf9880957de00e9d10a9ae837f1c9b7255fc8fa4e8264f3"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = 
["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors[torch]", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "humanfriendly" -version = "10.0" -description = "Human friendly output for text interfaces using Python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = 
"humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, - {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, -] - -[package.dependencies] -pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} - -[[package]] -name = "hyperframe" -version = "6.0.1" -description = "HTTP/2 framing layer for Python" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] - -[[package]] -name = "identify" -version = "2.6.0" -description = "File identification library for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.0.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = 
"importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "importlib-resources" -version = "6.4.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.5" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = 
"sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipython" -version = "8.26.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, - {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = 
["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jiter" -version = "0.5.0" -description = "Fast iterable JSON parser." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, - {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, - {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, - {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, - {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, - {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, - {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, - {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, - {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, - {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, - {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, - {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, - {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, - {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, - {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, - {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, - {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, - {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, - {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, - {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, - {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, - {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, - {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, - {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, - {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, - {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, - {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, -] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file 
= "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-path" -version = "0.3.3" -description = "JSONSchema Spec with object-oriented paths" -optional = false -python-versions = "<4.0.0,>=3.8.0" -files = [ - {file = "jsonschema_path-0.3.3-py3-none-any.whl", hash = "sha256:203aff257f8038cd3c67be614fe6b2001043408cb1b4e36576bc4921e09d83c4"}, - {file = "jsonschema_path-0.3.3.tar.gz", hash = "sha256:f02e5481a4288ec062f8e68c808569e427d905bedfecb7f2e4c69ef77957c382"}, -] - -[package.dependencies] -pathable = ">=0.4.1,<0.5.0" -PyYAML = ">=5.1" -referencing = ">=0.28.0,<0.36.0" -requests = ">=2.31.0,<3.0.0" - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "jupyter-client" -version = "8.6.2" -description = "Jupyter protocol implementation and client libraries" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, - {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = 
"sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "kubernetes" -version = "30.1.0" -description = "Kubernetes python client" -optional = false -python-versions = ">=3.6" -files = [ - {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, - {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4.1" -requests = "*" -requests-oauthlib = "*" -six = ">=1.9.0" -urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" - -[package.extras] -adal = ["adal (>=1.0.2)"] - -[[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.3" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "microsoft-kiota-abstractions" -version = "1.3.3" -description = "Core abstractions for kiota generated libraries in Python" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_abstractions-1.3.3-py2.py3-none-any.whl", hash = "sha256:deced0b01249459426d4ed45c8ab34e19250e514d4d05ce84c08893058ae06a1"}, - {file = "microsoft_kiota_abstractions-1.3.3.tar.gz", hash = "sha256:3cc01832a2e6dc6094c4e1abf7cbef3849a87d818a3b9193ad6c83a9f88e14ff"}, -] - 
-[package.dependencies] -opentelemetry-api = ">=1.19.0" -opentelemetry-sdk = ">=1.19.0" -std-uritemplate = ">=0.0.38" - -[[package]] -name = "microsoft-kiota-authentication-azure" -version = "1.0.0" -description = "Authentication provider for Kiota using Azure Identity" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_authentication_azure-1.0.0-py2.py3-none-any.whl", hash = "sha256:289fe002951ae661415a6d3fa7c422c096b739165acb32d786316988120a1b27"}, - {file = "microsoft_kiota_authentication_azure-1.0.0.tar.gz", hash = "sha256:752304f8d94b884cfec12583dd763ec0478805c7f80b29344e78c6d55a97bd01"}, -] - -[package.dependencies] -aiohttp = ">=3.8.0" -azure-core = ">=1.21.1" -microsoft-kiota-abstractions = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.20.0" -opentelemetry-sdk = ">=1.20.0" - -[[package]] -name = "microsoft-kiota-http" -version = "1.3.3" -description = "Kiota http request adapter implementation for httpx library" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_http-1.3.3-py2.py3-none-any.whl", hash = "sha256:21109a34140bf42e18855b7cf983939b891ae30739f21a9ce045c3a715f325fd"}, - {file = "microsoft_kiota_http-1.3.3.tar.gz", hash = "sha256:0b40f37c6c158c2e5b2dffa963a7fc342d368c1a64b8cca08631ba19d0ff94a9"}, -] - -[package.dependencies] -httpx = {version = ">=0.23.0", extras = ["http2"]} -microsoft-kiota_abstractions = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.20.0" -opentelemetry-sdk = ">=1.20.0" - -[[package]] -name = "microsoft-kiota-serialization-form" -version = "0.1.0" -description = "Implementation of Kiota Serialization Interfaces for URI-Form encoded serialization" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_serialization_form-0.1.0-py2.py3-none-any.whl", hash = "sha256:5bc76fb2fc67d7c1f878f876d252ea814e4fc38df505099b9b86de52d974380a"}, - {file = "microsoft_kiota_serialization_form-0.1.0.tar.gz", hash = 
"sha256:663ece0cb1a41fe9ddfc9195aa3f15f219e14d2a1ee51e98c53ad8d795b2785d"}, -] - -[package.dependencies] -microsoft-kiota_abstractions = ">=1.0.0,<2.0.0" -pendulum = ">=3.0.0" - -[[package]] -name = "microsoft-kiota-serialization-json" -version = "1.3.0" -description = "Implementation of Kiota Serialization interfaces for JSON" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_serialization_json-1.3.0-py2.py3-none-any.whl", hash = "sha256:fbf82835d8b77ef21b496aa711a512fe4494fa94dfe88f7fd014dffe33778e20"}, - {file = "microsoft_kiota_serialization_json-1.3.0.tar.gz", hash = "sha256:235b680e6eb646479ffb7b59d2a6f0216c4f7e1c2ff1219fd4d59e898fa6b124"}, -] - -[package.dependencies] -microsoft-kiota_abstractions = ">=1.0.0,<2.0.0" -pendulum = ">=3.0.0b1" - -[[package]] -name = "microsoft-kiota-serialization-multipart" -version = "0.1.0" -description = "Implementation of Kiota Serialization Interfaces for Multipart serialization" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_serialization_multipart-0.1.0-py2.py3-none-any.whl", hash = "sha256:ef183902e77807806b8a181cdde53ba5bc04c6c9bdb2f7d80f8bad5d720e0015"}, - {file = "microsoft_kiota_serialization_multipart-0.1.0.tar.gz", hash = "sha256:14e89e92582e6630ddbc70ac67b70bf189dacbfc41a96d3e1d10339e86c8dde5"}, -] - -[package.dependencies] -microsoft-kiota_abstractions = ">=1.0.0,<2.0.0" - -[[package]] -name = "microsoft-kiota-serialization-text" -version = "1.0.0" -description = "Implementation of Kiota Serialization interfaces for text/plain" -optional = false -python-versions = "*" -files = [ - {file = "microsoft_kiota_serialization_text-1.0.0-py2.py3-none-any.whl", hash = "sha256:1d3789e012b603e059a36cc675d1fd08cb81e0dde423d970c0af2eabce9c0d43"}, - {file = "microsoft_kiota_serialization_text-1.0.0.tar.gz", hash = "sha256:c3dd3f409b1c4f4963bd1e41d51b65f7e53e852130bb441d79b77dad88ee76ed"}, -] - -[package.dependencies] -microsoft-kiota_abstractions = ">=1.0.0,<2.0.0" 
-python-dateutil = ">=2.8.2" - -[[package]] -name = "milvus" -version = "2.3.5" -description = "Embeded Milvus" -optional = false -python-versions = ">=3.6" -files = [ - {file = "milvus-2.3.5-py3-none-macosx_12_0_arm64.whl", hash = "sha256:328d2ba24fb04a595f47ab226abf5565691bfe242beb88e61b31326d0416bf1a"}, - {file = "milvus-2.3.5-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:e35a8d6397da1f0f685d0f55afad8654296ff3b3aea296439e53ce9980d1ad22"}, - {file = "milvus-2.3.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:69515a0630ce29fd10e101fa442afea8ca1387b93a456cd9bd41fdf3deb93d04"}, -] - -[package.extras] -client = ["pymilvus (>=2.3.0b1,<2.4.0)"] - -[[package]] -name = "milvus-lite" -version = "2.4.9" -description = "A lightweight version of Milvus wrapped with Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "milvus_lite-2.4.9-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:d3e617b3d68c09ad656d54bc3d8cc4ef6ef56c54015e1563d4fe4bcec6b7c90a"}, - {file = "milvus_lite-2.4.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6e7029282d6829b277ebb92f64e2370be72b938e34770e1eb649346bda5d1d7f"}, - {file = "milvus_lite-2.4.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9b8e991e4e433596f6a399a165c1a506f823ec9133332e03d7f8a114bff4550d"}, - {file = "milvus_lite-2.4.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:7f53e674602101cfbcf0a4a59d19eaa139dfd5580639f3040ad73d901f24fc0b"}, -] - -[package.dependencies] -tqdm = "*" - -[[package]] -name = "mistralai" -version = "0.4.2" -description = "" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "mistralai-0.4.2-py3-none-any.whl", hash = "sha256:63c98eea139585f0a3b2c4c6c09c453738bac3958055e6f2362d3866e96b0168"}, - {file = "mistralai-0.4.2.tar.gz", hash = "sha256:5eb656710517168ae053f9847b0bb7f617eda07f1f93f946ad6c91a4d407fd93"}, -] - -[package.dependencies] -httpx = ">=0.25,<1" -orjson = ">=3.9.10,<3.11" -pydantic = ">=2.5.2,<3" - -[[package]] -name = "mistune" -version = 
"3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "mmh3" -version = "4.1.0" -description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." -optional = false -python-versions = "*" -files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = 
"mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = 
"mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = 
"mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = 
"mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, -] - -[package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] - -[[package]] -name = "monotonic" -version = "1.6" -description = "An implementation of time.monotonic() for Python 2 & < 3.3" -optional = false -python-versions = "*" -files = [ - {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, - {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, -] - -[[package]] -name = "more-itertools" -version = "10.4.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923"}, - {file = "more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27"}, -] - -[[package]] -name = "motor" -version = "3.5.1" -description = "Non-blocking MongoDB driver for Tornado or asyncio" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "motor-3.5.1-py3-none-any.whl", hash = "sha256:f95a9ea0f011464235e0bd72910baa291db3a6009e617ac27b82f57885abafb8"}, - {file = "motor-3.5.1.tar.gz", hash = "sha256:1622bd7b39c3e6375607c14736f6e1d498128eadf6f5f93f8786cf17d37062ac"}, -] - -[package.dependencies] -pymongo = ">=4.5,<5" - -[package.extras] -aws = ["pymongo[aws] (>=4.5,<5)"] -docs = ["aiohttp", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-rtd-theme (>=2,<3)", "tornado"] -encryption = ["pymongo[encryption] (>=4.5,<5)"] -gssapi = ["pymongo[gssapi] (>=4.5,<5)"] -ocsp = ["pymongo[ocsp] (>=4.5,<5)"] -snappy = ["pymongo[snappy] (>=4.5,<5)"] -test = ["aiohttp (!=3.8.6)", "mockupdb", "pymongo[encryption] (>=4.5,<5)", "pytest (>=7)", "tornado (>=5)"] -zstd = ["pymongo[zstd] (>=4.5,<5)"] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "msal" -version = "1.30.0" -description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "msal-1.30.0-py3-none-any.whl", hash = "sha256:423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de"}, - {file = "msal-1.30.0.tar.gz", hash = "sha256:b4bf00850092e465157d814efa24a18f788284c9a479491024d62903085ea2fb"}, -] - -[package.dependencies] -cryptography = ">=2.5,<45" -PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} -requests = ">=2.0.0,<3" - -[package.extras] -broker = ["pymsalruntime (>=0.13.2,<0.17)"] - -[[package]] -name = "msal-extensions" -version = "1.2.0" -description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." -optional = false -python-versions = ">=3.7" -files = [ - {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, - {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, -] - -[package.dependencies] -msal = ">=1.29,<2" -portalocker = ">=1.4,<3" - -[[package]] -name = "msgraph-core" -version = "1.1.2" -description = "Core component of the Microsoft Graph Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgraph_core-1.1.2-py3-none-any.whl", hash = "sha256:ed0695275d66914994a6ff71e7d71736ee4c4db3548a1021b2dd3a9605247def"}, - {file = "msgraph_core-1.1.2.tar.gz", hash = "sha256:c533cad1a23980487a4aa229dc5d9b00975fc6590e157e9f51046c6e80349288"}, -] - -[package.dependencies] -httpx = {version = ">=0.23.0", extras = ["http2"]} -microsoft-kiota-abstractions = ">=1.0.0,<2.0.0" -microsoft-kiota-authentication-azure = ">=1.0.0,<2.0.0" -microsoft-kiota-http = ">=1.0.0,<2.0.0" - -[package.extras] -dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"] - -[[package]] -name = "msgraph-sdk" -version = "1.5.4" 
-description = "The Microsoft Graph Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgraph_sdk-1.5.4-py3-none-any.whl", hash = "sha256:9ea349f30cc4a03edb587e26554c7a4839a38c2ef30d4b5396882fd2be82dcac"}, - {file = "msgraph_sdk-1.5.4.tar.gz", hash = "sha256:b0e146328d136d1db175938d8fc901f3bb32acf3ea6fe93c0dc7c5a0abc45e39"}, -] - -[package.dependencies] -azure-identity = ">=1.12.0" -microsoft-kiota-abstractions = ">=1.3.0,<2.0.0" -microsoft-kiota-authentication-azure = ">=1.0.0,<2.0.0" -microsoft-kiota-http = ">=1.0.0,<2.0.0" -microsoft-kiota-serialization-form = ">=0.1.0" -microsoft-kiota-serialization-json = ">=1.3.0,<2.0.0" -microsoft-kiota-serialization-multipart = ">=0.1.0" -microsoft-kiota-serialization-text = ">=1.0.0,<2.0.0" -msgraph_core = ">=1.0.0" - -[package.extras] -dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - 
{file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = 
"multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = 
"sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = 
"multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = 
"multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "mypy" -version = "1.11.1" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - 
{file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.4" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["pyqtwebengine (>=5.15)"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - 
-[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.10" -files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, -] - -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" 
-files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = 
"numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." 
-optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "8.9.2.26" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.19.3" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = 
false -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.6.20" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79"}, - {file = "nvidia_nvjitlink_cu12-12.6.20-py3-none-win_amd64.whl", hash = "sha256:ed3c43a17f37b0c922a919203d2d36cbef24d41cc3e6b625182f8b58203644f6"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - -[[package]] -name = "ollama" -version = "0.2.1" -description = "The official Python client for 
Ollama." -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "ollama-0.2.1-py3-none-any.whl", hash = "sha256:b6e2414921c94f573a903d1069d682ba2fb2607070ea9e19ca4a7872f2a460ec"}, - {file = "ollama-0.2.1.tar.gz", hash = "sha256:fa316baa9a81eac3beb4affb0a17deb3008fdd6ed05b123c26306cfbe4c349b6"}, -] - -[package.dependencies] -httpx = ">=0.27.0,<0.28.0" - -[[package]] -name = "onnxruntime" -version = "1.18.1" -description = "ONNX Runtime is a runtime accelerator for Machine Learning models" -optional = false -python-versions = "*" -files = [ - {file = "onnxruntime-1.18.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:29ef7683312393d4ba04252f1b287d964bd67d5e6048b94d2da3643986c74d80"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc706eb1df06ddf55776e15a30519fb15dda7697f987a2bbda4962845e3cec05"}, - {file = "onnxruntime-1.18.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7de69f5ced2a263531923fa68bbec52a56e793b802fcd81a03487b5e292bc3a"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win32.whl", hash = "sha256:221e5b16173926e6c7de2cd437764492aa12b6811f45abd37024e7cf2ae5d7e3"}, - {file = "onnxruntime-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:75211b619275199c861ee94d317243b8a0fcde6032e5a80e1aa9ded8ab4c6060"}, - {file = "onnxruntime-1.18.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:f26582882f2dc581b809cfa41a125ba71ad9e715738ec6402418df356969774a"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef36f3a8b768506d02be349ac303fd95d92813ba3ba70304d40c3cd5c25d6a4c"}, - {file = "onnxruntime-1.18.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:170e711393e0618efa8ed27b59b9de0ee2383bd2a1f93622a97006a5ad48e434"}, - {file = "onnxruntime-1.18.1-cp311-cp311-win32.whl", hash = "sha256:9b6a33419b6949ea34e0dc009bc4470e550155b6da644571ecace4b198b0d88f"}, - {file = 
"onnxruntime-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c1380a9f1b7788da742c759b6a02ba771fe1ce620519b2b07309decbd1a2fe1"}, - {file = "onnxruntime-1.18.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:31bd57a55e3f983b598675dfc7e5d6f0877b70ec9864b3cc3c3e1923d0a01919"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9e03c4ba9f734500691a4d7d5b381cd71ee2f3ce80a1154ac8f7aed99d1ecaa"}, - {file = "onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win32.whl", hash = "sha256:3a2d9ab6254ca62adbb448222e630dc6883210f718065063518c8f93a32432be"}, - {file = "onnxruntime-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ad93c560b1c38c27c0275ffd15cd7f45b3ad3fc96653c09ce2931179982ff204"}, - {file = "onnxruntime-1.18.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:3b55dc9d3c67626388958a3eb7ad87eb7c70f75cb0f7ff4908d27b8b42f2475c"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f80dbcfb6763cc0177a31168b29b4bd7662545b99a19e211de8c734b657e0669"}, - {file = "onnxruntime-1.18.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1ff2c61a16d6c8631796c54139bafea41ee7736077a0fc64ee8ae59432f5c58"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win32.whl", hash = "sha256:219855bd272fe0c667b850bf1a1a5a02499269a70d59c48e6f27f9c8bcb25d02"}, - {file = "onnxruntime-1.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdf16aa607eb9a2c60d5ca2d5abf9f448e90c345b6b94c3ed14f4fb7e6a2d07"}, - {file = "onnxruntime-1.18.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:128df253ade673e60cea0955ec9d0e89617443a6d9ce47c2d79eb3f72a3be3de"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9839491e77e5c5a175cab3621e184d5a88925ee297ff4c311b68897197f4cde9"}, - {file = "onnxruntime-1.18.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad3187c1faff3ac15f7f0e7373ef4788c582cafa655a80fdbb33eaec88976c66"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win32.whl", hash = "sha256:34657c78aa4e0b5145f9188b550ded3af626651b15017bf43d280d7e23dbf195"}, - {file = "onnxruntime-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:9c14fd97c3ddfa97da5feef595e2c73f14c2d0ec1d4ecbea99c8d96603c89589"}, -] - -[package.dependencies] -coloredlogs = "*" -flatbuffers = "*" -numpy = ">=1.21.6,<2.0" -packaging = "*" -protobuf = "*" -sympy = "*" - -[[package]] -name = "openai" -version = "1.41.1" -description = "The official Python library for the openai API" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-1.41.1-py3-none-any.whl", hash = "sha256:56fb04105263f79559aff3ceea2e1dd16f8c5385e8238cb66cf0e6888fa8bfcf"}, - {file = "openai-1.41.1.tar.gz", hash = "sha256:e38e376efd91e0d4db071e2a6517b6b4cac1c2a6fd63efdc5ec6be10c5967c1b"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.11,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - -[[package]] -name = "openapi-core" -version = "0.19.2" -description = "client-side and server-side support for the OpenAPI Specification v3" -optional = false -python-versions = "<4.0.0,>=3.8.0" -files = [ - {file = "openapi_core-0.19.2-py3-none-any.whl", hash = "sha256:b05f81031cc5b14f3a90b02f955d2ec756ccd5fba4f4e80bc4362520dac679a4"}, - {file = "openapi_core-0.19.2.tar.gz", hash = "sha256:db4e13dd3162d861d9485ae804f350586d9fd1d72808cdb264d6993d9b5ede3f"}, -] - -[package.dependencies] -isodate = "*" -jsonschema = ">=4.18.0,<5.0.0" -jsonschema-path = ">=0.3.1,<0.4.0" -more-itertools = "*" 
-openapi-schema-validator = ">=0.6.0,<0.7.0" -openapi-spec-validator = ">=0.7.1,<0.8.0" -parse = "*" -werkzeug = "*" - -[package.extras] -aiohttp = ["aiohttp (>=3.0)", "multidict (>=6.0.4,<7.0.0)"] -django = ["django (>=3.0)"] -falcon = ["falcon (>=3.0)"] -fastapi = ["fastapi (>=0.108.0,<0.109.0)"] -flask = ["flask"] -requests = ["requests"] -starlette = ["aioitertools (>=0.11.0,<0.12.0)", "starlette (>=0.26.1,<0.38.0)"] - -[[package]] -name = "openapi-schema-validator" -version = "0.6.2" -description = "OpenAPI schema validation for Python" -optional = false -python-versions = ">=3.8.0,<4.0.0" -files = [ - {file = "openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8"}, - {file = "openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804"}, -] - -[package.dependencies] -jsonschema = ">=4.19.1,<5.0.0" -jsonschema-specifications = ">=2023.5.2,<2024.0.0" -rfc3339-validator = "*" - -[[package]] -name = "openapi-spec-validator" -version = "0.7.1" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" -optional = false -python-versions = ">=3.8.0,<4.0.0" -files = [ - {file = "openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959"}, - {file = "openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7"}, -] - -[package.dependencies] -jsonschema = ">=4.18.0,<5.0.0" -jsonschema-path = ">=0.3.1,<0.4.0" -lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.6.0,<0.7.0" - -[[package]] -name = "opentelemetry-api" -version = "1.26.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064"}, - {file = 
"opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=8.0.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.26.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92"}, -] - -[package.dependencies] -opentelemetry-proto = "1.26.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.26.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.26.0" -opentelemetry-proto = "1.26.0" -opentelemetry-sdk = ">=1.26.0,<1.27.0" - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.47b0" -description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation-0.47b0-py3-none-any.whl", hash = "sha256:88974ee52b1db08fc298334b51c19d47e53099c33740e48c4f084bd1afd052d5"}, - {file = "opentelemetry_instrumentation-0.47b0.tar.gz", 
hash = "sha256:96f9885e450c35e3f16a4f33145f2ebf620aea910c9fd74a392bbc0f807a350f"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.4,<2.0" -setuptools = ">=16.0" -wrapt = ">=1.0.0,<2.0.0" - -[[package]] -name = "opentelemetry-instrumentation-asgi" -version = "0.47b0" -description = "ASGI instrumentation for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation_asgi-0.47b0-py3-none-any.whl", hash = "sha256:b798dc4957b3edc9dfecb47a4c05809036a4b762234c5071212fda39ead80ade"}, - {file = "opentelemetry_instrumentation_asgi-0.47b0.tar.gz", hash = "sha256:e78b7822c1bca0511e5e9610ec484b8994a81670375e570c76f06f69af7c506a"}, -] - -[package.dependencies] -asgiref = ">=3.0,<4.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.47b0" -opentelemetry-semantic-conventions = "0.47b0" -opentelemetry-util-http = "0.47b0" - -[package.extras] -instruments = ["asgiref (>=3.0,<4.0)"] - -[[package]] -name = "opentelemetry-instrumentation-fastapi" -version = "0.47b0" -description = "OpenTelemetry FastAPI Instrumentation" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_instrumentation_fastapi-0.47b0-py3-none-any.whl", hash = "sha256:5ac28dd401160b02e4f544a85a9e4f61a8cbe5b077ea0379d411615376a2bd21"}, - {file = "opentelemetry_instrumentation_fastapi-0.47b0.tar.gz", hash = "sha256:0c7c10b5d971e99a420678ffd16c5b1ea4f0db3b31b62faf305fbb03b4ebee36"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.47b0" -opentelemetry-instrumentation-asgi = "0.47b0" -opentelemetry-semantic-conventions = "0.47b0" -opentelemetry-util-http = "0.47b0" - -[package.extras] -instruments = ["fastapi (>=0.58,<1.0)", "fastapi-slim (>=0.111.0,<0.112.0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.26.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.8" -files = [ - {file = 
"opentelemetry_proto-1.26.0-py3-none-any.whl", hash = "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725"}, - {file = "opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.26.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897"}, - {file = "opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85"}, -] - -[package.dependencies] -opentelemetry-api = "1.26.0" -opentelemetry-semantic-conventions = "0.47b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.47b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063"}, - {file = "opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -opentelemetry-api = "1.26.0" - -[[package]] -name = "opentelemetry-util-http" -version = "0.47b0" -description = "Web util for OpenTelemetry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "opentelemetry_util_http-0.47b0-py3-none-any.whl", hash = "sha256:3d3215e09c4a723b12da6d0233a31395aeb2bb33a64d7b15a1500690ba250f19"}, - {file = "opentelemetry_util_http-0.47b0.tar.gz", hash = "sha256:352a07664c18eef827eb8ddcbd64c64a7284a39dd1655e2f16f577eb046ccb32"}, -] - -[[package]] -name = "orjson" -version = "3.10.7" -description = "Fast, correct Python JSON 
library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9"}, - {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250"}, - {file = "orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84"}, - {file = "orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175"}, - {file = "orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c"}, - {file = "orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e"}, - {file = "orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6"}, - {file = "orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0"}, - {file = "orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f"}, - {file = "orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5"}, - {file = "orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864"}, - {file = "orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09"}, - {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5"}, - {file = "orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b"}, - {file = "orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb"}, - {file = "orjson-3.10.7-cp312-none-win_amd64.whl", hash = "sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1"}, - {file = "orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149"}, - {file = "orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c"}, - {file = "orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad"}, - {file = "orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2"}, - {file = "orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024"}, - {file = "orjson-3.10.7-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash 
= "sha256:6ea2b2258eff652c82652d5e0f02bd5e0463a6a52abb78e49ac288827aaa1469"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430ee4d85841e1483d487e7b81401785a5dfd69db5de01314538f31f8fbf7ee1"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b6146e439af4c2472c56f8540d799a67a81226e11992008cb47e1267a9b3225"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:084e537806b458911137f76097e53ce7bf5806dda33ddf6aaa66a028f8d43a23"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829cf2195838e3f93b70fd3b4292156fc5e097aac3739859ac0dcc722b27ac0"}, - {file = "orjson-3.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1193b2416cbad1a769f868b1749535d5da47626ac29445803dae7cc64b3f5c98"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4e6c3da13e5a57e4b3dca2de059f243ebec705857522f188f0180ae88badd354"}, - {file = "orjson-3.10.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c31008598424dfbe52ce8c5b47e0752dca918a4fdc4a2a32004efd9fab41d866"}, - {file = "orjson-3.10.7-cp38-none-win32.whl", hash = "sha256:7122a99831f9e7fe977dc45784d3b2edc821c172d545e6420c375e5a935f5a1c"}, - {file = "orjson-3.10.7-cp38-none-win_amd64.whl", hash = "sha256:a763bc0e58504cc803739e7df040685816145a6f3c8a589787084b54ebc9f16e"}, - {file = "orjson-3.10.7-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e76be12658a6fa376fcd331b1ea4e58f5a06fd0220653450f0d415b8fd0fbe20"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed350d6978d28b92939bfeb1a0570c523f6170efc3f0a0ef1f1df287cd4f4960"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:144888c76f8520e39bfa121b31fd637e18d4cc2f115727865fdf9fa325b10412"}, - {file 
= "orjson-3.10.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09b2d92fd95ad2402188cf51573acde57eb269eddabaa60f69ea0d733e789fe9"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b24a579123fa884f3a3caadaed7b75eb5715ee2b17ab5c66ac97d29b18fe57f"}, - {file = "orjson-3.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591bcfe7512353bd609875ab38050efe3d55e18934e2f18950c108334b4ff"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f4db56635b58cd1a200b0a23744ff44206ee6aa428185e2b6c4a65b3197abdcd"}, - {file = "orjson-3.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0fa5886854673222618638c6df7718ea7fe2f3f2384c452c9ccedc70b4a510a5"}, - {file = "orjson-3.10.7-cp39-none-win32.whl", hash = "sha256:8272527d08450ab16eb405f47e0f4ef0e5ff5981c3d82afe0efd25dcbef2bcd2"}, - {file = "orjson-3.10.7-cp39-none-win_amd64.whl", hash = "sha256:974683d4618c0c7dbf4f69c95a979734bf183d0658611760017f6e70a145af58"}, - {file = "orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3"}, -] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.1" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = 
"pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = 
"pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", 
"numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = 
"pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parse" -version = "1.20.2" -description = "parse() is the opposite of format()" -optional = false -python-versions = "*" -files = [ - {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"}, - {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pathable" -version = "0.4.3" -description = "Object-oriented paths" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "pathable-0.4.3-py3-none-any.whl", hash = "sha256:cdd7b1f9d7d5c8b8d3315dbf5a86b2596053ae845f056f57d97c0eefff84da14"}, - {file = "pathable-0.4.3.tar.gz", hash = "sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab"}, -] - -[[package]] -name = "pendulum" -version = "3.0.0" -description = "Python datetimes made easy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pendulum-3.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2cf9e53ef11668e07f73190c805dbdf07a1939c3298b78d5a9203a86775d1bfd"}, - {file = "pendulum-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:fb551b9b5e6059377889d2d878d940fd0bbb80ae4810543db18e6f77b02c5ef6"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c58227ac260d5b01fc1025176d7b31858c9f62595737f350d22124a9a3ad82d"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60fb6f415fea93a11c52578eaa10594568a6716602be8430b167eb0d730f3332"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b69f6b4dbcb86f2c2fe696ba991e67347bcf87fe601362a1aba6431454b46bde"}, - {file = "pendulum-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138afa9c373ee450ede206db5a5e9004fd3011b3c6bbe1e57015395cd076a09f"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:83d9031f39c6da9677164241fd0d37fbfc9dc8ade7043b5d6d62f56e81af8ad2"}, - {file = "pendulum-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0c2308af4033fa534f089595bcd40a95a39988ce4059ccd3dc6acb9ef14ca44a"}, - {file = "pendulum-3.0.0-cp310-none-win_amd64.whl", hash = "sha256:9a59637cdb8462bdf2dbcb9d389518c0263799189d773ad5c11db6b13064fa79"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3725245c0352c95d6ca297193192020d1b0c0f83d5ee6bb09964edc2b5a2d508"}, - {file = "pendulum-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6c035f03a3e565ed132927e2c1b691de0dbf4eb53b02a5a3c5a97e1a64e17bec"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597e66e63cbd68dd6d58ac46cb7a92363d2088d37ccde2dae4332ef23e95cd00"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99a0f8172e19f3f0c0e4ace0ad1595134d5243cf75985dc2233e8f9e8de263ca"}, - {file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d8839e20f54706aed425bec82a83b4aec74db07f26acd039905d1237a5e1d4"}, - 
{file = "pendulum-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afde30e8146292b059020fbc8b6f8fd4a60ae7c5e6f0afef937bbb24880bdf01"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:660434a6fcf6303c4efd36713ca9212c753140107ee169a3fc6c49c4711c2a05"}, - {file = "pendulum-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dee9e5a48c6999dc1106eb7eea3e3a50e98a50651b72c08a87ee2154e544b33e"}, - {file = "pendulum-3.0.0-cp311-none-win_amd64.whl", hash = "sha256:d4cdecde90aec2d67cebe4042fd2a87a4441cc02152ed7ed8fb3ebb110b94ec4"}, - {file = "pendulum-3.0.0-cp311-none-win_arm64.whl", hash = "sha256:773c3bc4ddda2dda9f1b9d51fe06762f9200f3293d75c4660c19b2614b991d83"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:409e64e41418c49f973d43a28afe5df1df4f1dd87c41c7c90f1a63f61ae0f1f7"}, - {file = "pendulum-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38ad2121c5ec7c4c190c7334e789c3b4624798859156b138fcc4d92295835dc"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fde4d0b2024b9785f66b7f30ed59281bd60d63d9213cda0eb0910ead777f6d37"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b2c5675769fb6d4c11238132962939b960fcb365436b6d623c5864287faa319"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8af95e03e066826f0f4c65811cbee1b3123d4a45a1c3a2b4fc23c4b0dff893b5"}, - {file = "pendulum-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2165a8f33cb15e06c67070b8afc87a62b85c5a273e3aaa6bc9d15c93a4920d6f"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ad5e65b874b5e56bd942546ea7ba9dd1d6a25121db1c517700f1c9de91b28518"}, - {file = "pendulum-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:17fe4b2c844bbf5f0ece69cfd959fa02957c61317b2161763950d88fed8e13b9"}, - {file = "pendulum-3.0.0-cp312-none-win_amd64.whl", hash = "sha256:78f8f4e7efe5066aca24a7a57511b9c2119f5c2b5eb81c46ff9222ce11e0a7a5"}, - {file = "pendulum-3.0.0-cp312-none-win_arm64.whl", hash = "sha256:28f49d8d1e32aae9c284a90b6bb3873eee15ec6e1d9042edd611b22a94ac462f"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d4e2512f4e1a4670284a153b214db9719eb5d14ac55ada5b76cbdb8c5c00399d"}, - {file = "pendulum-3.0.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3d897eb50883cc58d9b92f6405245f84b9286cd2de6e8694cb9ea5cb15195a32"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e169cc2ca419517f397811bbe4589cf3cd13fca6dc38bb352ba15ea90739ebb"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17c3084a4524ebefd9255513692f7e7360e23c8853dc6f10c64cc184e1217ab"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826d6e258052715f64d05ae0fc9040c0151e6a87aae7c109ba9a0ed930ce4000"}, - {file = "pendulum-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2aae97087872ef152a0c40e06100b3665d8cb86b59bc8471ca7c26132fccd0f"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac65eeec2250d03106b5e81284ad47f0d417ca299a45e89ccc69e36130ca8bc7"}, - {file = "pendulum-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a5346d08f3f4a6e9e672187faa179c7bf9227897081d7121866358af369f44f9"}, - {file = "pendulum-3.0.0-cp37-none-win_amd64.whl", hash = "sha256:235d64e87946d8f95c796af34818c76e0f88c94d624c268693c85b723b698aa9"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:6a881d9c2a7f85bc9adafcfe671df5207f51f5715ae61f5d838b77a1356e8b7b"}, - {file = "pendulum-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:d7762d2076b9b1cb718a6631ad6c16c23fc3fac76cbb8c454e81e80be98daa34"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e8e36a8130819d97a479a0e7bf379b66b3b1b520e5dc46bd7eb14634338df8c"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dc843253ac373358ffc0711960e2dd5b94ab67530a3e204d85c6e8cb2c5fa10"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a78ad3635d609ceb1e97d6aedef6a6a6f93433ddb2312888e668365908c7120"}, - {file = "pendulum-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a137e9e0d1f751e60e67d11fc67781a572db76b2296f7b4d44554761049d6"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c95984037987f4a457bb760455d9ca80467be792236b69d0084f228a8ada0162"}, - {file = "pendulum-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29c6e578fe0f893766c0d286adbf0b3c726a4e2341eba0917ec79c50274ec16"}, - {file = "pendulum-3.0.0-cp38-none-win_amd64.whl", hash = "sha256:deaba8e16dbfcb3d7a6b5fabdd5a38b7c982809567479987b9c89572df62e027"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b11aceea5b20b4b5382962b321dbc354af0defe35daa84e9ff3aae3c230df694"}, - {file = "pendulum-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a90d4d504e82ad236afac9adca4d6a19e4865f717034fc69bafb112c320dcc8f"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:825799c6b66e3734227756fa746cc34b3549c48693325b8b9f823cb7d21b19ac"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad769e98dc07972e24afe0cff8d365cb6f0ebc7e65620aa1976fcfbcadc4c6f3"}, - {file = "pendulum-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6fc26907eb5fb8cc6188cc620bc2075a6c534d981a2f045daa5f79dfe50d512"}, - {file = 
"pendulum-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c717eab1b6d898c00a3e0fa7781d615b5c5136bbd40abe82be100bb06df7a56"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3ddd1d66d1a714ce43acfe337190be055cdc221d911fc886d5a3aae28e14b76d"}, - {file = "pendulum-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:822172853d7a9cf6da95d7b66a16c7160cb99ae6df55d44373888181d7a06edc"}, - {file = "pendulum-3.0.0-cp39-none-win_amd64.whl", hash = "sha256:840de1b49cf1ec54c225a2a6f4f0784d50bd47f68e41dc005b7f67c7d5b5f3ae"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b1f74d1e6ffe5d01d6023870e2ce5c2191486928823196f8575dcc786e107b1"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:729e9f93756a2cdfa77d0fc82068346e9731c7e884097160603872686e570f07"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e586acc0b450cd21cbf0db6bae386237011b75260a3adceddc4be15334689a9a"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22e7944ffc1f0099a79ff468ee9630c73f8c7835cd76fdb57ef7320e6a409df4"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fa30af36bd8e50686846bdace37cf6707bdd044e5cb6e1109acbad3277232e04"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:440215347b11914ae707981b9a57ab9c7b6983ab0babde07063c6ee75c0dc6e7"}, - {file = "pendulum-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:314c4038dc5e6a52991570f50edb2f08c339debdf8cea68ac355b32c4174e820"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5acb1d386337415f74f4d1955c4ce8d0201978c162927d07df8eb0692b2d8533"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a789e12fbdefaffb7b8ac67f9d8f22ba17a3050ceaaa635cd1cc4645773a4b1e"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:860aa9b8a888e5913bd70d819306749e5eb488e6b99cd6c47beb701b22bdecf5"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5ebc65ea033ef0281368217fbf59f5cb05b338ac4dd23d60959c7afcd79a60a0"}, - {file = "pendulum-3.0.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9fef18ab0386ef6a9ac7bad7e43ded42c83ff7ad412f950633854f90d59afa8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c134ba2f0571d0b68b83f6972e2307a55a5a849e7dac8505c715c531d2a8795"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:385680812e7e18af200bb9b4a49777418c32422d05ad5a8eb85144c4a285907b"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eec91cd87c59fb32ec49eb722f375bd58f4be790cae11c1b70fac3ee4f00da0"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4386bffeca23c4b69ad50a36211f75b35a4deb6210bdca112ac3043deb7e494a"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dfbcf1661d7146d7698da4b86e7f04814221081e9fe154183e34f4c5f5fa3bf8"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:04a1094a5aa1daa34a6b57c865b25f691848c61583fb22722a4df5699f6bf74c"}, - {file = "pendulum-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5b0ec85b9045bd49dd3a3493a5e7ddfd31c36a2a60da387c419fa04abcaecb23"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0a15b90129765b705eb2039062a6daf4d22c4e28d1a54fa260892e8c3ae6e157"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:bb8f6d7acd67a67d6fedd361ad2958ff0539445ef51cbe8cd288db4306503cd0"}, - {file = 
"pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd69b15374bef7e4b4440612915315cc42e8575fcda2a3d7586a0d88192d0c88"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc00f8110db6898360c53c812872662e077eaf9c75515d53ecc65d886eec209a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:83a44e8b40655d0ba565a5c3d1365d27e3e6778ae2a05b69124db9e471255c4a"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1a3604e9fbc06b788041b2a8b78f75c243021e0f512447806a6d37ee5214905d"}, - {file = "pendulum-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:92c307ae7accebd06cbae4729f0ba9fa724df5f7d91a0964b1b972a22baa482b"}, - {file = "pendulum-3.0.0.tar.gz", hash = "sha256:5d034998dea404ec31fae27af6b22cff1708f830a1ed7353be4d1019bb9f584e"}, -] - -[package.dependencies] -python-dateutil = ">=2.6" -tzdata = ">=2020.1" - -[package.extras] -test = ["time-machine (>=2.6.0)"] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pillow" -version = "10.4.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - 
{file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = 
"pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = 
"pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "pinecone-client" -version = "5.0.1" -description = "Pinecone client and SDK" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pinecone_client-5.0.1-py3-none-any.whl", hash = "sha256:c8f7835e1045ba84e295f217a8e85573ffb80b41501bbc1af6d92c9631c567a7"}, - {file = "pinecone_client-5.0.1.tar.gz", hash = "sha256:11c33ff5d1c38a6ce69e69fe532c0f22f312fb28d761bb30b3767816d3181d64"}, -] - -[package.dependencies] -certifi = ">=2019.11.17" -pinecone-plugin-inference = ">=1.0.3,<2.0.0" -pinecone-plugin-interface = ">=0.0.7,<0.0.8" -tqdm = ">=4.64.1" -typing-extensions = ">=3.7.4" -urllib3 = [ - {version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, - {version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, -] - -[package.extras] -grpc = ["googleapis-common-protos (>=1.53.0)", "grpcio (>=1.44.0)", "grpcio (>=1.59.0)", "lz4 (>=3.1.3)", "protobuf (>=4.25,<5.0)", "protoc-gen-openapiv2 (>=0.0.1,<0.0.2)"] - -[[package]] -name = "pinecone-plugin-inference" 
-version = "1.0.3" -description = "Embeddings plugin for Pinecone SDK" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pinecone_plugin_inference-1.0.3-py3-none-any.whl", hash = "sha256:bbdfe5dba99a87374d9e3315b62b8e1bbca52d5fe069a64cd6b212efbc8b9afd"}, - {file = "pinecone_plugin_inference-1.0.3.tar.gz", hash = "sha256:c6519ba730123713a181c010f0db9d6449d11de451b8e79bec4efd662b096f41"}, -] - -[package.dependencies] -pinecone-plugin-interface = ">=0.0.7,<0.0.8" - -[[package]] -name = "pinecone-plugin-interface" -version = "0.0.7" -description = "Plugin interface for the Pinecone python client" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "pinecone_plugin_interface-0.0.7-py3-none-any.whl", hash = "sha256:875857ad9c9fc8bbc074dbe780d187a2afd21f5bfe0f3b08601924a61ef1bba8"}, - {file = "pinecone_plugin_interface-0.0.7.tar.gz", hash = "sha256:b8e6675e41847333aa13923cc44daa3f85676d7157324682dc1640588a982846"}, -] - -[[package]] -name = "platformdirs" -version = "4.2.2" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "portalocker" -version = "2.10.1" -description = "Wraps the portalocker recipe for easy usage" -optional = false -python-versions = ">=3.8" -files = [ - {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, - {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, -] - -[package.dependencies] -pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} - -[package.extras] -docs = ["sphinx (>=1.7.1)"] -redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] - -[[package]] -name = "posthog" -version = "3.5.0" -description = "Integrate PostHog into any python application." 
-optional = false -python-versions = "*" -files = [ - {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, - {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, -] - -[package.dependencies] -backoff = ">=1.10.0" -monotonic = ">=1.5" -python-dateutil = ">2.1" -requests = ">=2.7,<3.0" -six = ">=1.5" - -[package.extras] -dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] -sentry = ["django", "sentry-sdk"] -test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] - -[[package]] -name = "prance" -version = "23.6.21.0" -description = "Resolving Swagger/OpenAPI 2.0 and 3.0.0 Parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "prance-23.6.21.0-py3-none-any.whl", hash = "sha256:6a4276fa07ed9f22feda4331097d7503c4adc3097e46ffae97425f2c1026bd9f"}, - {file = "prance-23.6.21.0.tar.gz", hash = "sha256:d8c15f8ac34019751cc4945f866d8d964d7888016d10de3592e339567177cabe"}, -] - -[package.dependencies] -chardet = ">=3.0" -packaging = ">=21.3" -requests = ">=2.25" -"ruamel.yaml" = ">=0.17.10" -six = ">=1.15,<2.0" - -[package.extras] -cli = ["click (>=7.0)"] -dev = ["bumpversion (>=0.6)", "pytest (>=6.1)", "pytest-cov (>=2.11)", "sphinx (>=3.4)", "towncrier (>=19.2)", "tox (>=3.4)"] -flex = ["flex (>=6.13,<7.0)"] -icu = ["PyICU (>=2.4,<3.0)"] -osv = ["openapi-spec-validator (>=0.5.1,<0.6.0)"] -ssv = ["swagger-spec-validator (>=2.4,<3.0)"] - -[[package]] -name = "pre-commit" -version = "3.8.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "prompt-toolkit" -version = "3.0.47" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" - -[package.extras] -testing = ["google-api-core (>=1.31.5)"] - -[[package]] -name = "protobuf" -version = "4.25.4" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = 
"sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, -] - -[[package]] -name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psycopg" -version = "3.2.1" -description = "PostgreSQL database adapter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg-3.2.1-py3-none-any.whl", hash = "sha256:ece385fb413a37db332f97c49208b36cf030ff02b199d7635ed2fbd378724175"}, - {file = "psycopg-3.2.1.tar.gz", hash = "sha256:dc8da6dc8729dacacda3cc2f17d2c9397a70a66cf0d2b69c91065d60d5f00cb7"}, -] - -[package.dependencies] -psycopg-binary = {version = "3.2.1", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} -typing-extensions = ">=4.4" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -binary = ["psycopg-binary (==3.2.1)"] -c = ["psycopg-c (==3.2.1)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.6)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", 
"sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.6)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] - -[[package]] -name = "psycopg-binary" -version = "3.2.1" -description = "PostgreSQL database adapter for Python -- C optimisation distribution" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:cad2de17804c4cfee8640ae2b279d616bb9e4734ac3c17c13db5e40982bd710d"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:592b27d6c46a40f9eeaaeea7c1fef6f3c60b02c634365eb649b2d880669f149f"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a997efbaadb5e1a294fb5760e2f5643d7b8e4e3fe6cb6f09e6d605fd28e0291"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1d2b6438fb83376f43ebb798bf0ad5e57bc56c03c9c29c85bc15405c8c0ac5a"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1f087bd84bdcac78bf9f024ebdbfacd07fc0a23ec8191448a50679e2ac4a19e"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:415c3b72ea32119163255c6504085f374e47ae7345f14bc3f0ef1f6e0976a879"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f092114f10f81fb6bae544a0ec027eb720e2d9c74a4fcdaa9dd3899873136935"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06a7aae34edfe179ddc04da005e083ff6c6b0020000399a2cbf0a7121a8a22ea"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b018631e5c80ce9bc210b71ea885932f9cca6db131e4df505653d7e3873a938"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:f8a509aeaac364fa965454e80cd110fe6d48ba2c80f56c9b8563423f0b5c3cfd"}, - {file = "psycopg_binary-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:413977d18412ff83486eeb5875eb00b185a9391c57febac45b8993bf9c0ff489"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62b1b7b07e00ee490afb39c0a47d8282a9c2822c7cfed9553a04b0058adf7e7f"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8afb07114ea9b924a4a0305ceb15354ccf0ef3c0e14d54b8dbeb03e50182dd7"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bb515d042f6a345714ec0403df68ccf13f73b05e567837d80c886c7c9d3805"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6418712ba63cebb0c88c050b3997185b0ef54173b36568522d5634ac06153040"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:101472468d59c74bb8565fab603e032803fd533d16be4b2d13da1bab8deb32a3"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3931f308ab4a479d0ee22dc04bea867a6365cac0172e5ddcba359da043854b"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dc314a47d44fe1a8069b075a64abffad347a3a1d8652fed1bab5d3baea37acb2"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc304a46be1e291031148d9d95c12451ffe783ff0cc72f18e2cc7ec43cdb8c68"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f9e13600647087df5928875559f0eb8f496f53e6278b7da9511b4b3d0aff960"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b140182830c76c74d17eba27df3755a46442ce8d4fb299e7f1cf2f74a87c877b"}, - {file = "psycopg_binary-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:3c838806eeb99af39f934b7999e35f947a8e577997cc892c12b5053a97a9057f"}, - {file = 
"psycopg_binary-3.2.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7066d3dca196ed0dc6172f9777b2d62e4f138705886be656cccff2d555234d60"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:28ada5f610468c57d8a4a055a8ea915d0085a43d794266c4f3b9d02f4288f4db"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8213bf50af073b1aa8dc3cff123bfeedac86332a16c1b7274910bc88a847c7"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74d623261655a169bc84a9669890975c229f2fa6e19a7f2d10a77675dcf1a707"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42781ba94e8842ee98bca5a7d0c44cc9d067500fedca2d6a90fa3609b6d16b42"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6669091d09f8ba36e10ce678a6d9916e110446236a9b92346464a3565635e"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b09e8a576a2ac69d695032ee76f31e03b30781828b5dd6d18c6a009e5a3d1c35"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8f28ff0cb9f1defdc4a6f8c958bf6787274247e7dfeca811f6e2f56602695fb1"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4c84fcac8a3a3479ac14673095cc4e1fdba2935499f72c436785ac679bec0d1a"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:950fd666ec9e9fe6a8eeb2b5a8f17301790e518953730ad44d715b59ffdbc67f"}, - {file = "psycopg_binary-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:334046a937bb086c36e2c6889fe327f9f29bfc085d678f70fac0b0618949f674"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:1d6833f607f3fc7b22226a9e121235d3b84c0eda1d3caab174673ef698f63788"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:1d353e028b8f848b9784450fc2abf149d53a738d451eab3ee4c85703438128b9"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f34e369891f77d0738e5d25727c307d06d5344948771e5379ea29c76c6d84555"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ab58213cc976a1666f66bc1cb2e602315cd753b7981a8e17237ac2a185bd4a1"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0104a72a17aa84b3b7dcab6c84826c595355bf54bb6ea6d284dcb06d99c6801"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:059cbd4e6da2337e17707178fe49464ed01de867dc86c677b30751755ec1dc51"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:73f9c9b984be9c322b5ec1515b12df1ee5896029f5e72d46160eb6517438659c"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:af0469c00f24c4bec18c3d2ede124bf62688d88d1b8a5f3c3edc2f61046fe0d7"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:463d55345f73ff391df8177a185ad57b552915ad33f5cc2b31b930500c068b22"}, - {file = "psycopg_binary-3.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:302b86f92c0d76e99fe1b5c22c492ae519ce8b98b88d37ef74fda4c9e24c6b46"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0879b5d76b7d48678d31278242aaf951bc2d69ca4e4d7cef117e4bbf7bfefda9"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f99e59f8a5f4dcd9cbdec445f3d8ac950a492fc0e211032384d6992ed3c17eb7"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84837e99353d16c6980603b362d0f03302d4b06c71672a6651f38df8a482923d"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7ce965caf618061817f66c0906f0452aef966c293ae0933d4fa5a16ea6eaf5bb"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78c2007caf3c90f08685c5378e3ceb142bafd5636be7495f7d86ec8a977eaeef"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7a84b5eb194a258116154b2a4ff2962ea60ea52de089508db23a51d3d6b1c7d1"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4a42b8f9ab39affcd5249b45cac763ac3cf12df962b67e23fd15a2ee2932afe5"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:788ffc43d7517c13e624c83e0e553b7b8823c9655e18296566d36a829bfb373f"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:21927f41c4d722ae8eb30d62a6ce732c398eac230509af5ba1749a337f8a63e2"}, - {file = "psycopg_binary-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:921f0c7f39590763d64a619de84d1b142587acc70fd11cbb5ba8fa39786f3073"}, -] - -[[package]] -name = "psycopg-pool" -version = "3.2.2" -description = "Connection Pool for Psycopg" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg_pool-3.2.2-py3-none-any.whl", hash = "sha256:273081d0fbfaced4f35e69200c89cb8fbddfe277c38cc86c235b90a2ec2c8153"}, - {file = "psycopg_pool-3.2.2.tar.gz", hash = "sha256:9e22c370045f6d7f2666a5ad1b0caf345f9f1912195b0b25d0d3bcc4f3a7389c"}, -] - -[package.dependencies] -typing-extensions = ">=4.4" - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -description = "Safely evaluate AST nodes without side 
effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pyarrow" -version = "17.0.0" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, - {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, - {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, - {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, - {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, - {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, - {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, - {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, - {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, - {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, - {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, - {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, - {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, - {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, - {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, - {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, - {file = "pyarrow-17.0.0.tar.gz", hash = 
"sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, -] - -[package.dependencies] -numpy = ">=1.16.6" - -[package.extras] -test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] - -[[package]] -name = "pyasn1" -version = "0.6.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pybars4" -version = "0.9.13" -description = "Handlebars.js templating for Python 3" -optional = false -python-versions = "*" -files = [ - {file = "pybars4-0.9.13.tar.gz", hash = "sha256:425817da20d4ad320bc9b8e77a60cab1bb9d3c677df3dce224925c3310fcd635"}, -] - -[package.dependencies] -PyMeta3 = ">=0.5.1" - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.8.2" -description = "Data validation using Python type hints" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" -typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.20.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - 
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydantic-settings" -version = "2.4.0" -description = "Settings management using Pydantic" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, -] - -[package.dependencies] -pydantic = ">=2.7.0" -python-dotenv = ">=0.21.0" - -[package.extras] -azure-key-vault = 
["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] -toml = ["tomli (>=2.0.1)"] -yaml = ["pyyaml (>=6.0.1)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.9.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pymeta3" -version = "0.5.1" -description = "Pattern-matching language based on OMeta for Python 3 and 2" -optional = false -python-versions = "*" -files = [ - {file = "PyMeta3-0.5.1.tar.gz", hash = "sha256:18bda326d9a9bbf587bfc0ee0bc96864964d78b067288bcf55d4d98681d05bcb"}, -] - -[[package]] -name = "pymilvus" -version = "2.4.5" -description = "Python Sdk for Milvus" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pymilvus-2.4.5-py3-none-any.whl", hash = 
"sha256:dc4f2d1eac8db9cf3951de39566a1a244695760bb94d8310fbfc73d6d62bb267"}, - {file = "pymilvus-2.4.5.tar.gz", hash = "sha256:1a497fe9b41d6bf62b1d5e1c412960922dde1598576fcbb8818040c8af11149f"}, -] - -[package.dependencies] -environs = "<=9.5.0" -grpcio = ">=1.49.1,<=1.63.0" -milvus-lite = {version = ">=2.4.0,<2.5.0", markers = "sys_platform != \"win32\""} -pandas = ">=1.2.4" -protobuf = ">=3.20.0" -setuptools = ">69" -ujson = ">=2.0.0" - -[package.extras] -bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "requests"] -dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] -model = ["milvus-model (>=0.1.0)"] - -[[package]] -name = "pymongo" -version = "4.8.0" -description = "Python driver for MongoDB " -optional = false -python-versions = ">=3.8" -files = [ - {file = "pymongo-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2b7bec27e047e84947fbd41c782f07c54c30c76d14f3b8bf0c89f7413fac67a"}, - {file = "pymongo-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c68fe128a171493018ca5c8020fc08675be130d012b7ab3efe9e22698c612a1"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920d4f8f157a71b3cb3f39bc09ce070693d6e9648fb0e30d00e2657d1dca4e49"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b4108ac9469febba18cea50db972605cc43978bedaa9fea413378877560ef8"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:180d5eb1dc28b62853e2f88017775c4500b07548ed28c0bd9c005c3d7bc52526"}, - {file = "pymongo-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec2b9088cdbceb87e6ca9c639d0ff9b9d083594dda5ca5d3c4f6774f4c81b33"}, - {file = 
"pymongo-4.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0cf61450feadca81deb1a1489cb1a3ae1e4266efd51adafecec0e503a8dcd84"}, - {file = "pymongo-4.8.0-cp310-cp310-win32.whl", hash = "sha256:8b18c8324809539c79bd6544d00e0607e98ff833ca21953df001510ca25915d1"}, - {file = "pymongo-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e5df28f74002e37bcbdfdc5109799f670e4dfef0fb527c391ff84f078050e7b5"}, - {file = "pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68"}, - {file = "pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758"}, - {file = "pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554"}, - {file = "pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba"}, - {file = "pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88"}, - {file = "pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526"}, - {file = "pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8"}, - {file = "pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2"}, - {file = "pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69"}, - {file = "pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8"}, - {file = "pymongo-4.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:519d1bab2b5e5218c64340b57d555d89c3f6c9d717cecbf826fb9d42415e7750"}, - {file = "pymongo-4.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87075a1feb1e602e539bdb1ef8f4324a3427eb0d64208c3182e677d2c0718b6f"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f53429515d2b3e86dcc83dadecf7ff881e538c168d575f3688698a8707b80a"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdc20cd1e1141b04696ffcdb7c71e8a4a665db31fe72e51ec706b3bdd2d09f36"}, - {file = 
"pymongo-4.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:284d0717d1a7707744018b0b6ee7801b1b1ff044c42f7be7a01bb013de639470"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bf0eb8b6ef40fa22479f09375468c33bebb7fe49d14d9c96c8fd50355188b0"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ecd71b9226bd1d49416dc9f999772038e56f415a713be51bf18d8676a0841c8"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0061af6e8c5e68b13f1ec9ad5251247726653c5af3c0bbdfbca6cf931e99216"}, - {file = "pymongo-4.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:658d0170f27984e0d89c09fe5c42296613b711a3ffd847eb373b0dbb5b648d5f"}, - {file = "pymongo-4.8.0-cp38-cp38-win32.whl", hash = "sha256:3ed1c316718a2836f7efc3d75b4b0ffdd47894090bc697de8385acd13c513a70"}, - {file = "pymongo-4.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:7148419eedfea9ecb940961cfe465efaba90595568a1fb97585fb535ea63fe2b"}, - {file = "pymongo-4.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8400587d594761e5136a3423111f499574be5fd53cf0aefa0d0f05b180710b0"}, - {file = "pymongo-4.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af3e98dd9702b73e4e6fd780f6925352237f5dce8d99405ff1543f3771201704"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de3a860f037bb51f968de320baef85090ff0bbb42ec4f28ec6a5ddf88be61871"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fc18b3a093f3db008c5fea0e980dbd3b743449eee29b5718bc2dc15ab5088bb"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18c9d8f975dd7194c37193583fd7d1eb9aea0c21ee58955ecf35362239ff31ac"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:408b2f8fdbeca3c19e4156f28fff1ab11c3efb0407b60687162d49f68075e63c"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6564780cafd6abeea49759fe661792bd5a67e4f51bca62b88faab497ab5fe89"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d18d86bc9e103f4d3d4f18b85a0471c0e13ce5b79194e4a0389a224bb70edd53"}, - {file = "pymongo-4.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9097c331577cecf8034422956daaba7ec74c26f7b255d718c584faddd7fa2e3c"}, - {file = "pymongo-4.8.0-cp39-cp39-win32.whl", hash = "sha256:d5428dbcd43d02f6306e1c3c95f692f68b284e6ee5390292242f509004c9e3a8"}, - {file = "pymongo-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:ef7225755ed27bfdb18730c68f6cb023d06c28f2b734597480fb4c0e500feb6f"}, - {file = "pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde"}, -] - -[package.dependencies] -dnspython = ">=1.16.0,<3.0.0" - -[package.extras] -aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"] -docs = ["furo (==2023.9.10)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-rtd-theme (>=2,<3)", "sphinxcontrib-shellcheck (>=1,<2)"] -encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"] -gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] -ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] -snappy = ["python-snappy"] -test = ["pytest (>=7)"] -zstd = ["zstandard"] - -[[package]] -name = "pyparsing" -version = "3.1.2" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = 
"sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pypika" -version = "0.48.9" -description = "A SQL query builder API for Python" -optional = false -python-versions = "*" -files = [ - {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, -] - -[[package]] -name = "pyproject-hooks" -version = "1.1.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, -] - -[[package]] -name = "pyreadline3" -version = "3.4.1" -description = "A python implementation of GNU readline." -optional = false -python-versions = "*" -files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, -] - -[[package]] -name = "pytest" -version = "8.3.2" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < 
\"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.8" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-cov" -version = "5.0.0" -description = "Pytest plugin for measuring coverage." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, -] - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] - -[[package]] -name = "pytest-xdist" -version = "3.6.1" -description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, - {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, -] - -[package.dependencies] -execnet = ">=2.1" -psutil = {version = ">=3.0", optional = true, markers = "extra == \"psutil\""} -pytest = ">=7.0.0" - 
-[package.extras] -psutil = ["psutil (>=3.0)"] -setproctitle = ["setproctitle"] -testing = ["filelock"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = 
"pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = 
"PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = 
"PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.1.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, 
- {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, - {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, - {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, - {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, - {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, - {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, - {file = 
"pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, - {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, - {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, - {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, - {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, - {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, - {file = 
"pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, - {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, - {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, - {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, - {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d416f2088ac8f12daacffbc2e8918ef4d6be8568e9d7155c83b7cebed49d2322"}, - {file = "pyzmq-26.1.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ecb6c88d7946166d783a635efc89f9a1ff11c33d680a20df9657b6902a1d133b"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:471312a7375571857a089342beccc1a63584315188560c7c0da7e0a23afd8a5c"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6cea102ffa16b737d11932c426f1dc14b5938cf7bc12e17269559c458ac334"}, - {file = 
"pyzmq-26.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec7248673ffc7104b54e4957cee38b2f3075a13442348c8d651777bf41aa45ee"}, - {file = "pyzmq-26.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0614aed6f87d550b5cecb03d795f4ddbb1544b78d02a4bd5eecf644ec98a39f6"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8746ce968be22a8a1801bf4a23e565f9687088580c3ed07af5846580dd97f76"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:7688653574392d2eaeef75ddcd0b2de5b232d8730af29af56c5adf1df9ef8d6f"}, - {file = "pyzmq-26.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d4dac7d97f15c653a5fedcafa82626bd6cee1450ccdaf84ffed7ea14f2b07a4"}, - {file = "pyzmq-26.1.0-cp313-cp313-win32.whl", hash = "sha256:ccb42ca0a4a46232d716779421bbebbcad23c08d37c980f02cc3a6bd115ad277"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e1e5d0a25aea8b691a00d6b54b28ac514c8cc0d8646d05f7ca6cb64b97358250"}, - {file = "pyzmq-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:fc82269d24860cfa859b676d18850cbb8e312dcd7eada09e7d5b007e2f3d9eb1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:416ac51cabd54f587995c2b05421324700b22e98d3d0aa2cfaec985524d16f1d"}, - {file = "pyzmq-26.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:ff832cce719edd11266ca32bc74a626b814fff236824aa1aeaad399b69fe6eae"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:393daac1bcf81b2a23e696b7b638eedc965e9e3d2112961a072b6cd8179ad2eb"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9869fa984c8670c8ab899a719eb7b516860a29bc26300a84d24d8c1b71eae3ec"}, - {file = "pyzmq-26.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b3b8e36fd4c32c0825b4461372949ecd1585d326802b1321f8b6dc1d7e9318c"}, - {file = 
"pyzmq-26.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3ee647d84b83509b7271457bb428cc347037f437ead4b0b6e43b5eba35fec0aa"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:45cb1a70eb00405ce3893041099655265fabcd9c4e1e50c330026e82257892c1"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:5cca7b4adb86d7470e0fc96037771981d740f0b4cb99776d5cb59cd0e6684a73"}, - {file = "pyzmq-26.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:91d1a20bdaf3b25f3173ff44e54b1cfbc05f94c9e8133314eb2962a89e05d6e3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c0665d85535192098420428c779361b8823d3d7ec4848c6af3abb93bc5c915bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96d7c1d35ee4a495df56c50c83df7af1c9688cce2e9e0edffdbf50889c167595"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b281b5ff5fcc9dcbfe941ac5c7fcd4b6c065adad12d850f95c9d6f23c2652384"}, - {file = "pyzmq-26.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5384c527a9a004445c5074f1e20db83086c8ff1682a626676229aafd9cf9f7d1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:754c99a9840839375ee251b38ac5964c0f369306eddb56804a073b6efdc0cd88"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9bdfcb74b469b592972ed881bad57d22e2c0acc89f5e8c146782d0d90fb9f4bf"}, - {file = "pyzmq-26.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bd13f0231f4788db619347b971ca5f319c5b7ebee151afc7c14632068c6261d3"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win32.whl", hash = "sha256:c5668dac86a869349828db5fc928ee3f58d450dce2c85607067d581f745e4fb1"}, - {file = "pyzmq-26.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad875277844cfaeca7fe299ddf8c8d8bfe271c3dc1caf14d454faa5cdbf2fa7a"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:65c6e03cc0222eaf6aad57ff4ecc0a070451e23232bb48db4322cc45602cede0"}, - {file = "pyzmq-26.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:038ae4ffb63e3991f386e7fda85a9baab7d6617fe85b74a8f9cab190d73adb2b"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bdeb2c61611293f64ac1073f4bf6723b67d291905308a7de9bb2ca87464e3273"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:61dfa5ee9d7df297c859ac82b1226d8fefaf9c5113dc25c2c00ecad6feeeb04f"}, - {file = "pyzmq-26.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3292d384537b9918010769b82ab3e79fca8b23d74f56fc69a679106a3e2c2cf"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f9499c70c19ff0fbe1007043acb5ad15c1dec7d8e84ab429bca8c87138e8f85c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3dd5523ed258ad58fed7e364c92a9360d1af8a9371e0822bd0146bdf017ef4c"}, - {file = "pyzmq-26.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baba2fd199b098c5544ef2536b2499d2e2155392973ad32687024bd8572a7d1c"}, - {file = "pyzmq-26.1.0-cp38-cp38-win32.whl", hash = "sha256:ddbb2b386128d8eca92bd9ca74e80f73fe263bcca7aa419f5b4cbc1661e19741"}, - {file = "pyzmq-26.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:79e45a4096ec8388cdeb04a9fa5e9371583bcb826964d55b8b66cbffe7b33c86"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:add52c78a12196bc0fda2de087ba6c876ea677cbda2e3eba63546b26e8bf177b"}, - {file = "pyzmq-26.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c03bd7f3339ff47de7ea9ac94a2b34580a8d4df69b50128bb6669e1191a895"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dcc37d9d708784726fafc9c5e1232de655a009dbf97946f117aefa38d5985a0f"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:5a6ed52f0b9bf8dcc64cc82cce0607a3dfed1dbb7e8c6f282adfccc7be9781de"}, - {file = "pyzmq-26.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451e16ae8bea3d95649317b463c9f95cd9022641ec884e3d63fc67841ae86dfe"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:906e532c814e1d579138177a00ae835cd6becbf104d45ed9093a3aaf658f6a6a"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05bacc4f94af468cc82808ae3293390278d5f3375bb20fef21e2034bb9a505b6"}, - {file = "pyzmq-26.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:57bb2acba798dc3740e913ffadd56b1fcef96f111e66f09e2a8db3050f1f12c8"}, - {file = "pyzmq-26.1.0-cp39-cp39-win32.whl", hash = "sha256:f774841bb0e8588505002962c02da420bcfb4c5056e87a139c6e45e745c0e2e2"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:359c533bedc62c56415a1f5fcfd8279bc93453afdb0803307375ecf81c962402"}, - {file = "pyzmq-26.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:7907419d150b19962138ecec81a17d4892ea440c184949dc29b358bc730caf69"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, - {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:bef24d3e4ae2c985034439f449e3f9e06bf579974ce0e53d8a507a1577d5b2ab"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2cd0f4d314f4a2518e8970b6f299ae18cff7c44d4a1fc06fc713f791c3a9e3ea"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fa25a620eed2a419acc2cf10135b995f8f0ce78ad00534d729aa761e4adcef8a"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef3b048822dca6d231d8a8ba21069844ae38f5d83889b9b690bf17d2acc7d099"}, - {file = "pyzmq-26.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9a6847c92d9851b59b9f33f968c68e9e441f9a0f8fc972c5580c5cd7cbc6ee24"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9b9305004d7e4e6a824f4f19b6d8f32b3578aad6f19fc1122aaf320cbe3dc83"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:63c1d3a65acb2f9c92dce03c4e1758cc552f1ae5c78d79a44e3bb88d2fa71f3a"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d36b8fffe8b248a1b961c86fbdfa0129dfce878731d169ede7fa2631447331be"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67976d12ebfd61a3bc7d77b71a9589b4d61d0422282596cf58c62c3866916544"}, - {file = "pyzmq-26.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:998444debc8816b5d8d15f966e42751032d0f4c55300c48cc337f2b3e4f17d03"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5c88b2f13bcf55fee78ea83567b9fe079ba1a4bef8b35c376043440040f7edb"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d906d43e1592be4b25a587b7d96527cb67277542a5611e8ea9e996182fae410"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:80b0c9942430d731c786545da6be96d824a41a51742e3e374fedd9018ea43106"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:314d11564c00b77f6224d12eb3ddebe926c301e86b648a1835c5b28176c83eab"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:093a1a3cae2496233f14b57f4b485da01b4ff764582c854c0f42c6dd2be37f3d"}, - {file = "pyzmq-26.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c397b1b450f749a7e974d74c06d69bd22dd362142f370ef2bd32a684d6b480c"}, - {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qdrant-client" -version = "1.11.0" -description = "Client library for the Qdrant vector search engine" -optional = false -python-versions = ">=3.8" -files = [ - {file = "qdrant_client-1.11.0-py3-none-any.whl", hash = "sha256:1f574ccebb91c0bc8a620c9a41a5a010084fbc4d8c6f1cd0ab7b2eeb97336fc0"}, - {file = "qdrant_client-1.11.0.tar.gz", hash = "sha256:7c1d4d7a96cfd1ee0cde2a21c607e9df86bcca795ad8d1fd274d295ab64b8458"}, -] - -[package.dependencies] -grpcio = ">=1.41.0" -grpcio-tools = ">=1.41.0" -httpx = {version = ">=0.20.0", extras = ["http2"]} -numpy = [ - {version = ">=1.21", markers = "python_version >= \"3.8\" and python_version < \"3.12\""}, - {version = ">=1.26", markers = "python_version >= \"3.12\""}, -] -portalocker = ">=2.7.0,<3.0.0" -pydantic = ">=1.10.8" -urllib3 = ">=1.26.14,<3" - -[package.extras] -fastembed = ["fastembed (==0.3.4)"] -fastembed-gpu = ["fastembed-gpu (==0.3.4)"] - -[[package]] -name = "redis" -version = "5.0.8" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = 
"redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} -hiredis = {version = ">1.0.0", optional = true, markers = "extra == \"hiredis\""} - -[package.extras] -hiredis = ["hiredis (>1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2024.7.24" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, - {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, - {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, - {file = 
"regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, - {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, - {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, - {file = 
"regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, - {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, - {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, - {file = 
"regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, - {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, - {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, - {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, - {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, - {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.20.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = 
"rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = 
"rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = 
"sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = 
"sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = 
"sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, -] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, -] - -[package.dependencies] -pyasn1 = ">=0.1.3" - -[[package]] -name = "ruamel-yaml" -version = "0.18.6" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} - -[package.extras] -docs = ["mercurial (>5.7)", "ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.8" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.6" -files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = 
"sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, - {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - -[[package]] -name = "ruff" -version = "0.5.7" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, -] - -[[package]] -name = "safetensors" -version = "0.4.4" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "safetensors-0.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2adb497ada13097f30e386e88c959c0fda855a5f6f98845710f5bb2c57e14f12"}, - {file = "safetensors-0.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7db7fdc2d71fd1444d85ca3f3d682ba2df7d61a637dfc6d80793f439eae264ab"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4f0eed76b430f009fbefca1a0028ddb112891b03cb556d7440d5cd68eb89a9"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d216fab0b5c432aabf7170883d7c11671622bde8bd1436c46d633163a703f6"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d9b76322e49c056bcc819f8bdca37a2daa5a6d42c07f30927b501088db03309"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32f0d1f6243e90ee43bc6ee3e8c30ac5b09ca63f5dd35dbc985a1fc5208c451a"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:44d464bdc384874601a177375028012a5f177f1505279f9456fea84bbc575c7f"}, - {file = "safetensors-0.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63144e36209ad8e4e65384dbf2d52dd5b1866986079c00a72335402a38aacdc5"}, - {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:051d5ecd490af7245258000304b812825974d5e56f14a3ff7e1b8b2ba6dc2ed4"}, - {file = "safetensors-0.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51bc8429d9376224cd3cf7e8ce4f208b4c930cd10e515b6ac6a72cbc3370f0d9"}, - {file = "safetensors-0.4.4-cp310-none-win32.whl", hash = "sha256:fb7b54830cee8cf9923d969e2df87ce20e625b1af2fd194222ab902d3adcc29c"}, - {file = "safetensors-0.4.4-cp310-none-win_amd64.whl", hash = "sha256:4b3e8aa8226d6560de8c2b9d5ff8555ea482599c670610758afdc97f3e021e9c"}, - {file = "safetensors-0.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bbaa31f2cb49013818bde319232ccd72da62ee40f7d2aa532083eda5664e85ff"}, - {file = "safetensors-0.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fdcb80f4e9fbb33b58e9bf95e7dbbedff505d1bcd1c05f7c7ce883632710006"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55c14c20be247b8a1aeaf3ab4476265e3ca83096bb8e09bb1a7aa806088def4f"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:949aaa1118660f992dbf0968487b3e3cfdad67f948658ab08c6b5762e90cc8b6"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c11a4ab7debc456326a2bac67f35ee0ac792bcf812c7562a4a28559a5c795e27"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cea44bba5c5601b297bc8307e4075535b95163402e4906b2e9b82788a2a6df"}, - {file = "safetensors-0.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9d752c97f6bbe327352f76e5b86442d776abc789249fc5e72eacb49e6916482"}, - {file = 
"safetensors-0.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03f2bb92e61b055ef6cc22883ad1ae898010a95730fa988c60a23800eb742c2c"}, - {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf3f91a9328a941acc44eceffd4e1f5f89b030985b2966637e582157173b98"}, - {file = "safetensors-0.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:20d218ec2b6899d29d6895419a58b6e44cc5ff8f0cc29fac8d236a8978ab702e"}, - {file = "safetensors-0.4.4-cp311-none-win32.whl", hash = "sha256:8079486118919f600c603536e2490ca37b3dbd3280e3ad6eaacfe6264605ac8a"}, - {file = "safetensors-0.4.4-cp311-none-win_amd64.whl", hash = "sha256:2f8c2eb0615e2e64ee27d478c7c13f51e5329d7972d9e15528d3e4cfc4a08f0d"}, - {file = "safetensors-0.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baec5675944b4a47749c93c01c73d826ef7d42d36ba8d0dba36336fa80c76426"}, - {file = "safetensors-0.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f15117b96866401825f3e94543145028a2947d19974429246ce59403f49e77c6"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a13a9caea485df164c51be4eb0c87f97f790b7c3213d635eba2314d959fe929"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b54bc4ca5f9b9bba8cd4fb91c24b2446a86b5ae7f8975cf3b7a277353c3127c"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08332c22e03b651c8eb7bf5fc2de90044f3672f43403b3d9ac7e7e0f4f76495e"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb62841e839ee992c37bb75e75891c7f4904e772db3691c59daaca5b4ab960e1"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5b927acc5f2f59547270b0309a46d983edc44be64e1ca27a7fcb0474d6cd67"}, - {file = "safetensors-0.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2a69c71b1ae98a8021a09a0b43363b0143b0ce74e7c0e83cacba691b62655fb8"}, - {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23654ad162c02a5636f0cd520a0310902c4421aab1d91a0b667722a4937cc445"}, - {file = "safetensors-0.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0677c109d949cf53756859160b955b2e75b0eefe952189c184d7be30ecf7e858"}, - {file = "safetensors-0.4.4-cp312-none-win32.whl", hash = "sha256:a51d0ddd4deb8871c6de15a772ef40b3dbd26a3c0451bb9e66bc76fc5a784e5b"}, - {file = "safetensors-0.4.4-cp312-none-win_amd64.whl", hash = "sha256:2d065059e75a798bc1933c293b68d04d79b586bb7f8c921e0ca1e82759d0dbb1"}, - {file = "safetensors-0.4.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9d625692578dd40a112df30c02a1adf068027566abd8e6a74893bb13d441c150"}, - {file = "safetensors-0.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7cabcf39c81e5b988d0adefdaea2eb9b4fd9bd62d5ed6559988c62f36bfa9a89"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8359bef65f49d51476e9811d59c015f0ddae618ee0e44144f5595278c9f8268c"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a32c662e7df9226fd850f054a3ead0e4213a96a70b5ce37b2d26ba27004e013"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c329a4dcc395364a1c0d2d1574d725fe81a840783dda64c31c5a60fc7d41472c"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:239ee093b1db877c9f8fe2d71331a97f3b9c7c0d3ab9f09c4851004a11f44b65"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd574145d930cf9405a64f9923600879a5ce51d9f315443a5f706374841327b6"}, - {file = "safetensors-0.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6784eed29f9e036acb0b7769d9e78a0dc2c72c2d8ba7903005350d817e287a4"}, - {file = 
"safetensors-0.4.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:65a4a6072436bf0a4825b1c295d248cc17e5f4651e60ee62427a5bcaa8622a7a"}, - {file = "safetensors-0.4.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:df81e3407630de060ae8313da49509c3caa33b1a9415562284eaf3d0c7705f9f"}, - {file = "safetensors-0.4.4-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:e4a0f374200e8443d9746e947ebb346c40f83a3970e75a685ade0adbba5c48d9"}, - {file = "safetensors-0.4.4-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:181fb5f3dee78dae7fd7ec57d02e58f7936498d587c6b7c1c8049ef448c8d285"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb4ac1d8f6b65ec84ddfacd275079e89d9df7c92f95675ba96c4f790a64df6e"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76897944cd9239e8a70955679b531b9a0619f76e25476e57ed373322d9c2075d"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a9e9d1a27e51a0f69e761a3d581c3af46729ec1c988fa1f839e04743026ae35"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:005ef9fc0f47cb9821c40793eb029f712e97278dae84de91cb2b4809b856685d"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26987dac3752688c696c77c3576f951dbbdb8c57f0957a41fb6f933cf84c0b62"}, - {file = "safetensors-0.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c05270b290acd8d249739f40d272a64dd597d5a4b90f27d830e538bc2549303c"}, - {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:068d3a33711fc4d93659c825a04480ff5a3854e1d78632cdc8f37fee917e8a60"}, - {file = "safetensors-0.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:063421ef08ca1021feea8b46951251b90ae91f899234dd78297cbe7c1db73b99"}, - {file = "safetensors-0.4.4-cp37-none-win32.whl", hash = 
"sha256:d52f5d0615ea83fd853d4e1d8acf93cc2e0223ad4568ba1e1f6ca72e94ea7b9d"}, - {file = "safetensors-0.4.4-cp37-none-win_amd64.whl", hash = "sha256:88a5ac3280232d4ed8e994cbc03b46a1807ce0aa123867b40c4a41f226c61f94"}, - {file = "safetensors-0.4.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3467ab511bfe3360967d7dc53b49f272d59309e57a067dd2405b4d35e7dcf9dc"}, - {file = "safetensors-0.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ab4c96d922e53670ce25fbb9b63d5ea972e244de4fa1dd97b590d9fd66aacef"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87df18fce4440477c3ef1fd7ae17c704a69a74a77e705a12be135ee0651a0c2d"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e5fe345b2bc7d88587149ac11def1f629d2671c4c34f5df38aed0ba59dc37f8"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f1a3e01dce3cd54060791e7e24588417c98b941baa5974700eeb0b8eb65b0a0"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6bf35e9a8998d8339fd9a05ac4ce465a4d2a2956cc0d837b67c4642ed9e947"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:166c0c52f6488b8538b2a9f3fbc6aad61a7261e170698779b371e81b45f0440d"}, - {file = "safetensors-0.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87e9903b8668a16ef02c08ba4ebc91e57a49c481e9b5866e31d798632805014b"}, - {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9c421153aa23c323bd8483d4155b4eee82c9a50ac11cccd83539104a8279c64"}, - {file = "safetensors-0.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a4b8617499b2371c7353302c5116a7e0a3a12da66389ce53140e607d3bf7b3d3"}, - {file = "safetensors-0.4.4-cp38-none-win32.whl", hash = "sha256:c6280f5aeafa1731f0a3709463ab33d8e0624321593951aefada5472f0b313fd"}, - {file = 
"safetensors-0.4.4-cp38-none-win_amd64.whl", hash = "sha256:6ceed6247fc2d33b2a7b7d25d8a0fe645b68798856e0bc7a9800c5fd945eb80f"}, - {file = "safetensors-0.4.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5cf6c6f6193797372adf50c91d0171743d16299491c75acad8650107dffa9269"}, - {file = "safetensors-0.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:419010156b914a3e5da4e4adf992bee050924d0fe423c4b329e523e2c14c3547"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88f6fd5a5c1302ce79993cc5feeadcc795a70f953c762544d01fb02b2db4ea33"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d468cffb82d90789696d5b4d8b6ab8843052cba58a15296691a7a3df55143cd2"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9353c2af2dd467333d4850a16edb66855e795561cd170685178f706c80d2c71e"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83c155b4a33368d9b9c2543e78f2452090fb030c52401ca608ef16fa58c98353"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9850754c434e636ce3dc586f534bb23bcbd78940c304775bee9005bf610e98f1"}, - {file = "safetensors-0.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:275f500b4d26f67b6ec05629a4600645231bd75e4ed42087a7c1801bff04f4b3"}, - {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5c2308de665b7130cd0e40a2329278226e4cf083f7400c51ca7e19ccfb3886f3"}, - {file = "safetensors-0.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e06a9ebc8656e030ccfe44634f2a541b4b1801cd52e390a53ad8bacbd65f8518"}, - {file = "safetensors-0.4.4-cp39-none-win32.whl", hash = "sha256:ef73df487b7c14b477016947c92708c2d929e1dee2bacdd6fff5a82ed4539537"}, - {file = "safetensors-0.4.4-cp39-none-win_amd64.whl", hash = 
"sha256:83d054818a8d1198d8bd8bc3ea2aac112a2c19def2bf73758321976788706398"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1d1f34c71371f0e034004a0b583284b45d233dd0b5f64a9125e16b8a01d15067"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a8043a33d58bc9b30dfac90f75712134ca34733ec3d8267b1bd682afe7194f5"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db8f0c59c84792c12661f8efa85de160f80efe16b87a9d5de91b93f9e0bce3c"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfc1fc38e37630dd12d519bdec9dcd4b345aec9930bb9ce0ed04461f49e58b52"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c9d86d9b13b18aafa88303e2cd21e677f5da2a14c828d2c460fe513af2e9a5"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:43251d7f29a59120a26f5a0d9583b9e112999e500afabcfdcb91606d3c5c89e3"}, - {file = "safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2c42e9b277513b81cf507e6121c7b432b3235f980cac04f39f435b7902857f91"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3daacc9a4e3f428a84dd56bf31f20b768eb0b204af891ed68e1f06db9edf546f"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218bbb9b883596715fc9997bb42470bf9f21bb832c3b34c2bf744d6fa8f2bbba"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bd5efc26b39f7fc82d4ab1d86a7f0644c8e34f3699c33f85bfa9a717a030e1b"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56ad9776b65d8743f86698a1973292c966cf3abff627efc44ed60e66cc538ddd"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", 
hash = "sha256:30f23e6253c5f43a809dea02dc28a9f5fa747735dc819f10c073fe1b605e97d4"}, - {file = "safetensors-0.4.4-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5512078d00263de6cb04e9d26c9ae17611098f52357fea856213e38dc462f81f"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b96c3d9266439d17f35fc2173111d93afc1162f168e95aed122c1ca517b1f8f1"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:08d464aa72a9a13826946b4fb9094bb4b16554bbea2e069e20bd903289b6ced9"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:210160816d5a36cf41f48f38473b6f70d7bcb4b0527bedf0889cc0b4c3bb07db"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb276a53717f2bcfb6df0bcf284d8a12069002508d4c1ca715799226024ccd45"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2c28c6487f17d8db0089e8b2cdc13de859366b94cc6cdc50e1b0a4147b56551"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7915f0c60e4e6e65d90f136d85dd3b429ae9191c36b380e626064694563dbd9f"}, - {file = "safetensors-0.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:00eea99ae422fbfa0b46065acbc58b46bfafadfcec179d4b4a32d5c45006af6c"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb1ed4fcb0b3c2f3ea2c5767434622fe5d660e5752f21ac2e8d737b1e5e480bb"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:73fc9a0a4343188bdb421783e600bfaf81d0793cd4cce6bafb3c2ed567a74cd5"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c37e6b714200824c73ca6eaf007382de76f39466a46e97558b8dc4cf643cfbf"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f75698c5c5c542417ac4956acfc420f7d4a2396adca63a015fd66641ea751759"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca1a209157f242eb183e209040097118472e169f2e069bfbd40c303e24866543"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:177f2b60a058f92a3cec7a1786c9106c29eca8987ecdfb79ee88126e5f47fa31"}, - {file = "safetensors-0.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ee9622e84fe6e4cd4f020e5fda70d6206feff3157731df7151d457fdae18e541"}, - {file = "safetensors-0.4.4.tar.gz", hash = "sha256:5fe3e9b705250d0172ed4e100a811543108653fb2b66b9e702a088ad03772a07"}, -] - -[package.extras] -all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] -dev = ["safetensors[all]"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] -mlx = ["mlx (>=0.0.9)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] -pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] -torch = ["safetensors[numpy]", "torch (>=1.10)"] - -[[package]] -name = "scikit-learn" -version = "1.5.1" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745"}, - {file = "scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac"}, - {file = "scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21"}, - {file = "scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2"}, - {file = "scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4"}, - {file = "scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf"}, - {file = "scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395"}, - {file = "scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915"}, - {file = "scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b"}, - {file = "scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88e0672c7ac21eb149d409c74cc29f1d611d5158175846e7a9c2427bd12b3956"}, - {file = "scikit_learn-1.5.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:7b073a27797a283187a4ef4ee149959defc350b46cbf63a84d8514fe16b69855"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b59e3e62d2be870e5c74af4e793293753565c7383ae82943b83383fdcf5cc5c1"}, - {file = "scikit_learn-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd8d3a19d4bd6dc5a7d4f358c8c3a60934dc058f363c34c0ac1e9e12a31421d"}, - {file = "scikit_learn-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f57428de0c900a98389c4a433d4a3cf89de979b3aa24d1c1d251802aa15e44d"}, - {file = "scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=3.1.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] -examples = ["matplotlib (>=3.3.4)", "pandas 
(>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] -maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.14.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.10" -files = [ - {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"}, - {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"}, - {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"}, - {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"}, - {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"}, - {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"}, - {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"}, - {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"}, - {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"}, - {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"}, - {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"}, - {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"}, - {file = "scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"}, - {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"}, -] - -[package.dependencies] -numpy = ">=1.23.5,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "sentence-transformers" -version = "2.7.0" -description = "Multilingual text embeddings" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "sentence_transformers-2.7.0-py3-none-any.whl", hash = "sha256:6a7276b05a95931581bbfa4ba49d780b2cf6904fa4a171ec7fd66c343f761c98"}, - {file = "sentence_transformers-2.7.0.tar.gz", hash = "sha256:2f7df99d1c021dded471ed2d079e9d1e4fc8e30ecb06f957be060511b36f24ea"}, -] - -[package.dependencies] -huggingface-hub = ">=0.15.1" -numpy = "*" -Pillow = "*" -scikit-learn = "*" -scipy = "*" -torch = ">=1.11.0" -tqdm = "*" -transformers = ">=4.34.0,<5.0.0" - -[package.extras] -dev = ["pre-commit", "pytest", "ruff (>=0.3.0)"] - -[[package]] -name = "setuptools" -version = "72.1.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = 
false -python-versions = ">=3.8" -files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, -] - -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shapely" -version = "2.0.5" -description = "Manipulation and analysis of geometric objects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, - {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, - {file = 
"shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, - {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, - {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, - {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, - {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"}, - {file = 
"shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"}, - {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"}, - {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"}, - {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"}, - {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"}, - {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"}, - {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"}, - {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = 
"sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, - {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, - {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, - {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, -] - -[package.dependencies] -numpy = ">=1.14,<3" - -[package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snoop" -version = "0.4.3" -description = "Powerful debugging tools for Python" -optional = false -python-versions = "*" -files = [ - {file = "snoop-0.4.3-py2.py3-none-any.whl", hash = "sha256:b7418581889ff78b29d9dc5ad4625c4c475c74755fb5cba82c693c6e32afadc0"}, - {file = "snoop-0.4.3.tar.gz", hash = "sha256:2e0930bb19ff0dbdaa6f5933f88e89ed5984210ea9f9de0e1d8231fa5c1c1f25"}, -] - -[package.dependencies] -asttokens = "*" -cheap-repr = ">=0.4.0" -executing = "*" -pygments = "*" -six = "*" - -[package.extras] -tests = ["Django", "birdseye", "littleutils", "numpy (>=1.16.5)", "pandas (>=0.24.2)", "pprintpp", "prettyprinter", "pytest", "pytest-order", "pytest-order (<=0.11.0)"] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "std-uritemplate" -version = "1.0.5" -description = "std-uritemplate implementation for Python" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "std_uritemplate-1.0.5-py3-none-any.whl", hash = "sha256:8daf745b350ef3bc7b4ef82460a6c48aa459ca65fce8bda8657178959e3832d7"}, - {file = "std_uritemplate-1.0.5.tar.gz", hash = "sha256:6ea31e72f96ab2b54d93c774de2175ce5350a833fbf7c024bb3718a3a539f605"}, -] - -[[package]] -name = "sympy" -version = "1.13.2" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, -] - -[package.dependencies] -mpmath = ">=1.1.0,<1.4" - -[package.extras] -dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"] - -[[package]] -name = "tenacity" -version = "9.0.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "threadpoolctl" -version = 
"3.5.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, - {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, -] - -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - -[[package]] -name = "tokenizers" -version = "0.19.1" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = 
"tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = 
"tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = 
"tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, -] - -[package.dependencies] -huggingface-hub = ">=0.16.4,<1.0" - -[package.extras] -dev = ["tokenizers[testing]"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "torch" -version = "2.2.2" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false 
-python-versions = ">=3.8.0" -files = [ - {file = "torch-2.2.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:bc889d311a855dd2dfd164daf8cc903a6b7273a747189cebafdd89106e4ad585"}, - {file = "torch-2.2.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:15dffa4cc3261fa73d02f0ed25f5fa49ecc9e12bf1ae0a4c1e7a88bbfaad9030"}, - {file = "torch-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:11e8fe261233aeabd67696d6b993eeb0896faa175c6b41b9a6c9f0334bdad1c5"}, - {file = "torch-2.2.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:b2e2200b245bd9f263a0d41b6a2dab69c4aca635a01b30cca78064b0ef5b109e"}, - {file = "torch-2.2.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:877b3e6593b5e00b35bbe111b7057464e76a7dd186a287280d941b564b0563c2"}, - {file = "torch-2.2.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:ad4c03b786e074f46606f4151c0a1e3740268bcf29fbd2fdf6666d66341c1dcb"}, - {file = "torch-2.2.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:32827fa1fbe5da8851686256b4cd94cc7b11be962862c2293811c94eea9457bf"}, - {file = "torch-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:f9ef0a648310435511e76905f9b89612e45ef2c8b023bee294f5e6f7e73a3e7c"}, - {file = "torch-2.2.2-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:95b9b44f3bcebd8b6cd8d37ec802048c872d9c567ba52c894bba90863a439059"}, - {file = "torch-2.2.2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:49aa4126ede714c5aeef7ae92969b4b0bbe67f19665106463c39f22e0a1860d1"}, - {file = "torch-2.2.2-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:cf12cdb66c9c940227ad647bc9cf5dba7e8640772ae10dfe7569a0c1e2a28aca"}, - {file = "torch-2.2.2-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:89ddac2a8c1fb6569b90890955de0c34e1724f87431cacff4c1979b5f769203c"}, - {file = "torch-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:451331406b760f4b1ab298ddd536486ab3cfb1312614cfe0532133535be60bea"}, - {file = "torch-2.2.2-cp312-none-macosx_10_9_x86_64.whl", hash = 
"sha256:eb4d6e9d3663e26cd27dc3ad266b34445a16b54908e74725adb241aa56987533"}, - {file = "torch-2.2.2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:bf9558da7d2bf7463390b3b2a61a6a3dbb0b45b161ee1dd5ec640bf579d479fc"}, - {file = "torch-2.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd2bf7697c9e95fb5d97cc1d525486d8cf11a084c6af1345c2c2c22a6b0029d0"}, - {file = "torch-2.2.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b421448d194496e1114d87a8b8d6506bce949544e513742b097e2ab8f7efef32"}, - {file = "torch-2.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:3dbcd563a9b792161640c0cffe17e3270d85e8f4243b1f1ed19cca43d28d235b"}, - {file = "torch-2.2.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:31f4310210e7dda49f1fb52b0ec9e59382cfcb938693f6d5378f25b43d7c1d29"}, - {file = "torch-2.2.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:c795feb7e8ce2e0ef63f75f8e1ab52e7fd5e1a4d7d0c31367ade1e3de35c9e95"}, - {file = "torch-2.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a6e5770d68158d07456bfcb5318b173886f579fdfbf747543901ce718ea94782"}, - {file = "torch-2.2.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:67dcd726edff108e2cd6c51ff0e416fd260c869904de95750e80051358680d24"}, - {file = "torch-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:539d5ef6c4ce15bd3bd47a7b4a6e7c10d49d4d21c0baaa87c7d2ef8698632dfb"}, - {file = "torch-2.2.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:dff696de90d6f6d1e8200e9892861fd4677306d0ef604cb18f2134186f719f82"}, - {file = "torch-2.2.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:3a4dd910663fd7a124c056c878a52c2b0be4a5a424188058fe97109d4436ee42"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", 
markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "2.2.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""} -typing-extensions = ">=4.8.0" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.9.1)"] - -[[package]] -name = "tornado" -version = "6.4.1" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = 
"sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.3" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "transformers" -version = "4.44.0" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.44.0-py3-none-any.whl", hash = "sha256:ea0ff72def71e9f4812d9414d4803b22681b1617aa6f511bd51cfff2b44a6fca"}, - {file = "transformers-4.44.0.tar.gz", hash = "sha256:75699495e30b7635ca444d8d372e138c687ab51a875b387e33f1fb759c37f196"}, -] - -[package.dependencies] -accelerate = {version = ">=0.21.0", optional = true, markers = "extra == \"torch\""} -filelock = "*" -huggingface-hub = ">=0.23.2,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.4.1" -tokenizers = ">=0.19,<0.20" -torch = {version = "*", optional = true, markers = "extra == \"torch\""} -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate 
(>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -benchmark = ["optimum-benchmark (>=0.2.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", 
"librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", 
"onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6,<0.15.0)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.5.1)", "urllib3 (<2.0.0)"] -ray = ["ray[tune] (>=2.7.0)"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -ruff = ["ruff (==0.5.1)"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", 
"cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.5.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm (<=0.9.16)"] -tokenizers = ["tokenizers (>=0.19,<0.20)"] -torch = ["accelerate (>=0.21.0)", "torch"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (>=10.0.1,<=15.0)"] - -[[package]] -name = "triton" -version = "2.2.0" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "*" -files = [ - {file = "triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2294514340cfe4e8f4f9e5c66c702744c4a117d25e618bd08469d0bfed1e2e5"}, - {file = "triton-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da58a152bddb62cafa9a857dd2bc1f886dbf9f9c90a2b5da82157cd2b34392b0"}, - {file = 
"triton-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af58716e721460a61886668b205963dc4d1e4ac20508cc3f623aef0d70283d5"}, - {file = "triton-2.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8fe46d3ab94a8103e291bd44c741cc294b91d1d81c1a2888254cbf7ff846dab"}, - {file = "triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ce26093e539d727e7cf6f6f0d932b1ab0574dc02567e684377630d86723ace"}, - {file = "triton-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:227cc6f357c5efcb357f3867ac2a8e7ecea2298cd4606a8ba1e931d1d5a947df"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.20)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"] -tutorials = ["matplotlib", "pandas", "tabulate", "torch"] - -[[package]] -name = "typer" -version = "0.12.3" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, -] - -[package.dependencies] -types-setuptools = "*" - -[[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" -description = "Typing stubs for pyOpenSSL" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, - {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240808" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, - {file = "types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, -] - -[[package]] -name = "types-redis" -version = "4.6.0.20240806" -description = "Typing stubs for redis" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "types-redis-4.6.0.20240806.tar.gz", hash = "sha256:60dd02c2b91ea2d42ad079ac58dedc31d71d6eedb1c21d3796811b02baac655d"}, - {file = "types_redis-4.6.0.20240806-py3-none-any.whl", hash = "sha256:9d8fbe0ce37e3660c0a06982db7812384295d10a93d637c7f8604a2f3c88b0e6"}, -] - -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - -[[package]] -name = "types-setuptools" -version = "71.1.0.20240813" -description = "Typing stubs for setuptools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-setuptools-71.1.0.20240813.tar.gz", hash = "sha256:94ff4f0af18c7c24ac88932bcb0f5655fb7187a001b7c61e53a1bfdaf9877b54"}, - {file = "types_setuptools-71.1.0.20240813-py3-none-any.whl", hash = "sha256:d9d9ba2936f5d3b47b59ae9bf65942a60063ac1d6bbee180a8a79fbb43f22ce5"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = 
"ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = 
"sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = 
"ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, -] - -[[package]] -name = "uritemplate" -version = "4.1.1" -description = "Implementation of RFC 6570 URI Templates" -optional = false -python-versions = ">=3.6" -files = [ - {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, - {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, -] - -[[package]] -name = "urllib3" -version = "2.2.2" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "usearch" -version = "2.13.2" -description = "Smaller & Faster Single-File Vector Search Engine from Unum" -optional = false -python-versions = "*" -files = [ - {file = "usearch-2.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9d1e39e46bc132df19930b8432a32722946f339ebbdbdd0075fbc0819ba00103"}, - {file = "usearch-2.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6cb9ab2448c531c17847135e06cf00abdb6a45bfc06e13330144e0baf0b3fdb"}, - {file = "usearch-2.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:1f649031009b4828ae87aba650ee620a617a98bfcacd501f76f0b92ad93aef77"}, - {file = "usearch-2.13.2-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c13219c73e506663fcb577722c57a91bcdbafc7e8d20f9d3233efee643dba72"}, - {file = "usearch-2.13.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2ce68c330273d7a1eb3e1ef39dc318f60bd74eca055877ece865c7c45c2440eb"}, - {file = "usearch-2.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3cc1ef99a7023d13d9c6e2d0cf182fe9f13b5fcafba559247c4cecfc12fa47ee"}, - {file = "usearch-2.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f57ff2e6d4c517b86908b9f77ebfb71e18db25110589f2b7c28b5f713d582ba2"}, - {file = "usearch-2.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:a11768735610d221f775ad34a9a904a637d94e71c9b0746243da7383197ca03e"}, - {file = "usearch-2.13.2-cp310-cp310-win_arm64.whl", hash = "sha256:906ad9304b0dc678fa79afd5282869c48bb88039914c4d4c14cf98b3fd8596da"}, - {file = "usearch-2.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3ba61347e7eda059c2f02dec4ad4ff89b317e10c9d25a73b06f92b8b2f40a855"}, - {file = "usearch-2.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11796b2d10bce16d373f9d2badc2ed361bd44b5b96e02fbd30c48adbb084c63d"}, - {file = "usearch-2.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5de2eb6d6468a369c051f7523d5431fa64d3b2331c6191b6430d7344de575eb"}, - {file = "usearch-2.13.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:30ca771280bb47de63cb3d77d727b5c5537f60477b1da9857e40d9835db7a664"}, - {file = "usearch-2.13.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2c75f980536893d6e7879d2be34ad426b0823c3197b4a5e8d07cd6944787784"}, - {file = "usearch-2.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:682b5b7b2935269269d862ac651356bc80b5005e3943d7cbaecb949828a82359"}, - {file = "usearch-2.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:4905b65f00b02f609f3fff6b954b1e912b0349498e907f926290094838d5e996"}, - {file = "usearch-2.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:f3dfcabd448547f1cd1d315a4f7493c360e0972a4bce0d0217a95a58e60d6369"}, - {file = "usearch-2.13.2-cp311-cp311-win_arm64.whl", hash = "sha256:7881dc53571fbb8b81ee4c41ca4d666d76441fe69f3e99641fa8da99b98ecbf1"}, - {file = "usearch-2.13.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:cadf54a120e76472ae8a355ba5189d524ef0a0a0cadf07c399669283128a47c8"}, - {file = "usearch-2.13.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14effe0f433847f41b7a2304165a23b6c6a0955e46a26731fc89cb4488d3debf"}, - {file = "usearch-2.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa17338313cf50e04cf11785e5892976513152a4b5f37b019602f772e35c4cc3"}, - {file = "usearch-2.13.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed6f52e841fb49e244bcbcdad982febaacd782eff1e8cf31377de02baa4e504"}, - {file = "usearch-2.13.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9d66e3274dbb71f978df4acd741da288bbdb129b9af6f5ac6223182f7f7f9fb8"}, - {file = "usearch-2.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:14b0fb3ac8829e805e4971d846d248e80f7b5274c59d845678bcaa6fbe84426d"}, - {file = "usearch-2.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9ce2f471bf3e947841f446f8e44963edffa90db66f5d315d0e0e738f0369264f"}, - {file = "usearch-2.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:9a69c6ae3d35e9fa03366782984ff97df3a8ee4d6995d51dee5bdf59fb13c5be"}, - {file = "usearch-2.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:9bfecb48814b77c439f8c0d72eb6e645d3a00a16f9385643f78732e4c207b68a"}, - {file = "usearch-2.13.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f6055b056557a02b63506c2c6bf30b97f7645f212accba1f4fdce8826ccfa823"}, - {file = "usearch-2.13.2-cp37-cp37m-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:5115c25e1ba4a5beff0fa4780ea7db3b60a827efe3f72453b7fee6b299878d19"}, - {file = "usearch-2.13.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:04aca42da4dcccd20c6524a3ece6e4e3e458ea5a15fd51f2d39bc9b353d475c0"}, - {file = "usearch-2.13.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:ba46879670aa27fff4d5446296a95d1ff62e52d9165d8ac6ac3fdd949998d0c9"}, - {file = "usearch-2.13.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d25bcea3f81d1bf2e836dc35f3c83d7d39b7123b4b39f77827af547fec5b8d15"}, - {file = "usearch-2.13.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7dcd9c9c1509dc6435d332569f05312eba6dab820b5ed28674e0b0444de23057"}, - {file = "usearch-2.13.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:16014dd2e8b798eb8012223c51847b59d9ad8b7a9424b6ae32101f3f31d6e711"}, - {file = "usearch-2.13.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:158aeb09fecc25d725465c0c6dee0793fe34eae668e23545eb927706e9ac1e35"}, - {file = "usearch-2.13.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd0c6f27c07505929f09a90637c59f3719a0b2201faed61ee3cbeca65af56165"}, - {file = "usearch-2.13.2-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:64cf63b0e0a707d0064fd0d0eb73899d36a6ed6f694603d24e3fb6921903b09c"}, - {file = "usearch-2.13.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:2bbae49cabea6982cb1a8f68aab0a3772c8f9ce0e9e6a9842969b39d391c919b"}, - {file = "usearch-2.13.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cb586334a6b801fe2a6ca7dae5af7a1b7c26aa01efffef708eff35cda45ce5a3"}, - {file = "usearch-2.13.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:94d144f5a5616a1b5f333219ee3f05420aa2fd44d6463e58affaf0e62bd1143d"}, - {file = "usearch-2.13.2-cp38-cp38-win_amd64.whl", hash = "sha256:30dac0f71a6f05c80075f62e32b1a535b41a5073499ecbe577ca0298c1be8a8c"}, - {file = "usearch-2.13.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b7eeeda7d2f9f3b5e0fbd0c6befc783461c43777a97ae46a358acd44500ce8a4"}, - {file = 
"usearch-2.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b01ce27094c30e370766b145190842f2715362113da712322bc9eed7a1099d2"}, - {file = "usearch-2.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f0a4afa048fec3893651841c6430e6b98f85c1a9690687823fdf6c31712bd09f"}, - {file = "usearch-2.13.2-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef1cce3580f946d97b9b58278b6960632abcd4b62c2be566f0ea11dd78cc0252"}, - {file = "usearch-2.13.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8c48a1e24f37c97e698471ecd25393ef5291a71f0e90887a1fe0001dfbe19aa5"}, - {file = "usearch-2.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bfbd43571f42af16cd30796d7132edfe5514088bafc96f5178caf4990e1efd14"}, - {file = "usearch-2.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:486134f647b3ddc5baae49f57ef014618bb7c9f0d2b8c6adc178ab793ad2191f"}, - {file = "usearch-2.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:a92a2fa400024a5bf0a09d0d49f86db6db787eb9d7de7b1f2f0249e796e9408c"}, - {file = "usearch-2.13.2-cp39-cp39-win_arm64.whl", hash = "sha256:bc39d38d8552325dd87ce2946ec94ab7f65e5895e8e681d5996d79197d8adfeb"}, -] - -[package.dependencies] -numpy = "*" -tqdm = "*" - -[[package]] -name = "uvicorn" -version = "0.30.6" -description = "The lightning-fast ASGI server." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} -h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "uvloop" -version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = 
"uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - 
{file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "validators" -version = "0.33.0" -description = "Python Data Validation for Humansโ„ข" -optional = false -python-versions = ">=3.8" -files = [ - {file = "validators-0.33.0-py3-none-any.whl", hash = "sha256:134b586a98894f8139865953899fc2daeb3d0c35569552c5518f089ae43ed075"}, - {file = "validators-0.33.0.tar.gz", hash = "sha256:535867e9617f0100e676a1257ba1e206b9bfd847ddc171e4d44811f07ff0bfbf"}, -] - -[package.extras] -crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] - -[[package]] -name = "virtualenv" -version = "20.26.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", 
"coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "watchfiles" -version = "0.23.0" -description = "Simple, modern and high performance file watching and code reload in python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4"}, - {file = "watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1"}, - {file = "watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207"}, - {file = "watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b"}, - {file = 
"watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981"}, - {file = "watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42"}, - {file = "watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab"}, - {file = "watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788"}, - {file = "watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220"}, - {file = "watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320"}, - {file = "watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b"}, - {file = "watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e"}, - {file = "watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5"}, - {file = "watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f"}, - {file = "watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0"}, - {file = "watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c"}, - {file = 
"watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581"}, - {file = "watchfiles-0.23.0-cp312-none-win32.whl", hash = "sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75"}, - {file = "watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb"}, - {file = "watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9"}, - {file = "watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d"}, - {file = "watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef"}, - {file = "watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1"}, - {file = "watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b"}, - {file = "watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2dddc2487d33e92f8b6222b5fb74ae2cfde5e8e6c44e0248d24ec23befdc5366"}, - {file = "watchfiles-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e75695cc952e825fa3e0684a7f4a302f9128721f13eedd8dbd3af2ba450932b8"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2537ef60596511df79b91613a5bb499b63f46f01a11a81b0a2b0dedf645d0a9c"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20b423b58f5fdde704a226b598a2d78165fe29eb5621358fe57ea63f16f165c4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b98732ec893975455708d6fc9a6daab527fc8bbe65be354a3861f8c450a632a4"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee1f5fcbf5bc33acc0be9dd31130bcba35d6d2302e4eceafafd7d9018c7755ab"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f195338a5a7b50a058522b39517c50238358d9ad8284fd92943643144c0c03"}, - {file = "watchfiles-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524fcb8d59b0dbee2c9b32207084b67b2420f6431ed02c18bd191e6c575f5c48"}, - {file = "watchfiles-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0eff099a4df36afaa0eea7a913aa64dcf2cbd4e7a4f319a73012210af4d23810"}, - {file = 
"watchfiles-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a8323daae27ea290ba3350c70c836c0d2b0fb47897fa3b0ca6a5375b952b90d3"}, - {file = "watchfiles-0.23.0-cp38-none-win32.whl", hash = "sha256:aafea64a3ae698695975251f4254df2225e2624185a69534e7fe70581066bc1b"}, - {file = "watchfiles-0.23.0-cp38-none-win_amd64.whl", hash = "sha256:c846884b2e690ba62a51048a097acb6b5cd263d8bd91062cd6137e2880578472"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a753993635eccf1ecb185dedcc69d220dab41804272f45e4aef0a67e790c3eb3"}, - {file = "watchfiles-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6bb91fa4d0b392f0f7e27c40981e46dda9eb0fbc84162c7fb478fe115944f491"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1f67312efa3902a8e8496bfa9824d3bec096ff83c4669ea555c6bdd213aa516"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ca6b71dcc50d320c88fb2d88ecd63924934a8abc1673683a242a7ca7d39e781"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aec5c29915caf08771d2507da3ac08e8de24a50f746eb1ed295584ba1820330"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1733b9bc2c8098c6bdb0ff7a3d7cb211753fecb7bd99bdd6df995621ee1a574b"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02ff5d7bd066c6a7673b17c8879cd8ee903078d184802a7ee851449c43521bdd"}, - {file = "watchfiles-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e2de19801b0eaa4c5292a223effb7cfb43904cb742c5317a0ac686ed604765"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8ada449e22198c31fb013ae7e9add887e8d2bd2335401abd3cbc55f8c5083647"}, - {file = "watchfiles-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:3af1b05361e1cc497bf1be654a664750ae61f5739e4bb094a2be86ec8c6db9b6"}, - {file = "watchfiles-0.23.0-cp39-none-win32.whl", hash = "sha256:486bda18be5d25ab5d932699ceed918f68eb91f45d018b0343e3502e52866e5e"}, - {file = "watchfiles-0.23.0-cp39-none-win_amd64.whl", hash = "sha256:d2d42254b189a346249424fb9bb39182a19289a2409051ee432fb2926bad966a"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed"}, - {file = "watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aba037c1310dd108411d27b3d5815998ef0e83573e47d4219f45753c710f969f"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a96ac14e184aa86dc43b8a22bb53854760a58b2966c2b41580de938e9bf26ed0"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11698bb2ea5e991d10f1f4f83a39a02f91e44e4bd05f01b5c1ec04c9342bf63c"}, - {file = "watchfiles-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efadd40fca3a04063d40c4448c9303ce24dd6151dc162cfae4a2a060232ebdcb"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:556347b0abb4224c5ec688fc58214162e92a500323f50182f994f3ad33385dcb"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1cf7f486169986c4b9d34087f08ce56a35126600b6fef3028f19ca16d5889071"}, - 
{file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18de0f82c62c4197bea5ecf4389288ac755896aac734bd2cc44004c56e4ac47"}, - {file = "watchfiles-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532e1f2c491274d1333a814e4c5c2e8b92345d41b12dc806cf07aaff786beb66"}, - {file = "watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b"}, -] - -[package.dependencies] -anyio = ">=3.0.0" - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "weaviate-client" -version = "4.7.1" -description = "A python native Weaviate client" -optional = false -python-versions = ">=3.8" -files = [ - {file = "weaviate_client-4.7.1-py3-none-any.whl", hash = "sha256:342f5c67b126cee4dc3a60467ad1ae74971cd5614e27af6fb13d687a345352c4"}, - {file = "weaviate_client-4.7.1.tar.gz", hash = "sha256:af99ac4e53613d2ff5b797372e95d004d0c8a1dd10a7f592068bcb423a30af30"}, -] - -[package.dependencies] -authlib = ">=1.2.1,<2.0.0" -grpcio = ">=1.57.0,<2.0.0" -grpcio-health-checking = ">=1.57.0,<2.0.0" -grpcio-tools = ">=1.57.0,<2.0.0" -httpx = ">=0.25.0,<=0.27.0" -pydantic = ">=2.5.0,<3.0.0" -requests = ">=2.30.0,<3.0.0" -validators = "0.33.0" - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = 
"webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = 
"websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = 
"websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = 
"websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, -] - -[[package]] -name = "werkzeug" -version = "3.0.3" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = 
"yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = 
"yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = 
"yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = 
"yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = 
"yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = 
"yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[[package]] -name = "zipp" -version = "3.20.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[extras] -all = ["anthropic", "azure-ai-inference", "azure-core", "azure-cosmos", "azure-identity", "azure-search-documents", "chromadb", "ipykernel", "milvus", "mistralai", 
"motor", "ollama", "pinecone-client", "psycopg", "pyarrow", "pymilvus", "qdrant-client", "redis", "sentence-transformers", "torch", "transformers", "usearch", "weaviate-client"] -anthropic = ["anthropic"] -azure = ["azure-ai-inference", "azure-core", "azure-cosmos", "azure-identity", "azure-search-documents"] -chromadb = ["chromadb"] -google = ["google-cloud-aiplatform", "google-generativeai"] -hugging-face = ["sentence-transformers", "torch", "transformers"] -milvus = ["milvus", "pymilvus"] -mistralai = ["mistralai"] -mongo = ["motor"] -notebooks = ["ipykernel"] -ollama = ["ollama"] -pinecone = ["pinecone-client"] -postgres = ["psycopg"] -qdrant = ["qdrant-client"] -redis = ["redis", "types-redis"] -usearch = ["pyarrow", "usearch"] -weaviate = ["weaviate-client"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.10,<3.13" -content-hash = "45bffc6686e76fda8799c7a786d0618594cf8f8b7450bb8d805423882a0c20b3" diff --git a/python/pyproject.toml b/python/pyproject.toml index da2c21769037..649f9415a942 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,195 +1,140 @@ -[tool.poetry] +[project] name = "semantic-kernel" -version = "1.6.0" description = "Semantic Kernel Python SDK" -authors = ["Microsoft "] +authors = [{ name = "Microsoft", email = "SK-Support@microsoft.com"}] readme = "pip/README.md" -packages = [{include = "semantic_kernel"}] -homepage = "https://learn.microsoft.com/en-us/semantic-kernel/overview/" -repository = "https://github.com/microsoft/semantic-kernel/" - -[tool.poetry.urls] -"Source Code" = "https://github.com/microsoft/semantic-kernel/tree/main/python" -"Release Notes" = "https://github.com/microsoft/semantic-kernel/releases?q=tag%3Apython-1&expanded=true" - -[tool.poetry.dependencies] -python = "^3.10,<3.13" - -# main dependencies -aiohttp = "^3.8" -pydantic = "^2" -pydantic-settings = "^2" -defusedxml = "^0.7.1" - -# embeddings -numpy = [ - { version = ">=1.25", python = "<3.12" }, - { version = ">=1.26", python = ">=3.12" 
}, +# Version read from __version__ field in __init__.py by Flit +dynamic = ["version"] +requires-python = ">=3.10,<3.13" +license = {file = "LICENSE"} +urls.homepage = "https://learn.microsoft.com/en-us/semantic-kernel/overview/" +urls.source = "https://github.com/microsoft/semantic-kernel/tree/main/python" +urls.release_notes = "https://github.com/microsoft/semantic-kernel/releases?q=tag%3Apython-1&expanded=true" +urls.issues = "https://github.com/microsoft/semantic-kernel/issues" +classifiers = [ + "License :: OSI Approved :: MIT License", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Framework :: Pydantic :: 2", + "Typing :: Typed", +] +dependencies = [ + "aiohttp ~= 3.8", + "pydantic ~= 2.0", + "pydantic-settings ~= 2.0", + "defusedxml ~= 0.7", + + # azure identity + "azure-identity ~= 1.13", + + # embeddings + "numpy >= 1.25.0; python_version < '3.12'", + "numpy >= 1.26.0; python_version >= '3.12'", + + # openai connector + "openai ~= 1.0", + + # openapi and swagger + "openapi_core >= 0.18,<0.20", + + # OpenTelemetry + "opentelemetry-api ~= 1.24", + "opentelemetry-sdk ~= 1.24", + "prance ~= 23.6.21.0", + + # templating + "pybars4 ~= 0.9", + "jinja2 ~= 3.1", + "nest-asyncio ~= 1.6" ] - -# openai connector -openai = ">=1.0" - -# openapi and swagger -openapi_core = ">=0.18,<0.20" - -# OpenTelemetry -opentelemetry-api = "^1.24.0" -opentelemetry-sdk = "^1.24.0" - -prance = "^23.6.21.0" - -# templating -pybars4 = "^0.9.13" -jinja2 = "^3.1.3" -nest-asyncio = "^1.6.0" ### Optional dependencies -# azure -azure-ai-inference = {version = "^1.0.0b1", allow-prereleases = true, optional = true} -azure-search-documents = {version = "11.6.0b4", allow-prereleases = true, optional = true} -azure-core = { version = "^1.28.0", optional = true} -azure-identity = { 
version = "^1.13.0", optional = true} -azure-cosmos = { version = "^4.7.0", optional = true} -# chroma -chromadb = { version = ">=0.4.13,<0.6.0", optional = true} -# google -google-cloud-aiplatform = { version = "^1.60.0", optional = true} -google-generativeai = { version = "^0.7.2", optional = true} -# hugging face -transformers = { version = "^4.28.1", extras=['torch'], optional = true} -sentence-transformers = { version = "^2.2.2", optional = true} -torch = {version = "2.2.2", optional = true} -# mongo -motor = { version = "^3.3.2", optional = true } -# notebooks -ipykernel = { version = "^6.21.1", optional = true} -# milvus -pymilvus = { version = ">=2.3,<2.4.6", optional = true} -milvus = { version = ">=2.3,<2.3.8", markers = 'sys_platform != "win32"', optional = true} -# mistralai -mistralai = { version = "^0.4.1", optional = true} -# ollama -ollama = { version = "^0.2.1", optional = true} -# anthropic -anthropic = { version = "^0.32.0", optional = true } -# pinecone -pinecone-client = { version = "^5.0.0", optional = true} -# postgres -psycopg = { version="^3.2.1", extras=["binary","pool"], optional = true} -# qdrant -qdrant-client = { version = '^1.9', optional = true} -# redis -redis = { version = "^5.0.7", extras=['hiredis'], optional = true} -types-redis = { version="^4.6.0.20240425", optional = true } -# usearch -usearch = { version = "^2.9", optional = true} -pyarrow = { version = ">=12.0.1,<18.0.0", optional = true} -weaviate-client = { version = ">=3.18,<5.0", optional = true} -pandas = {version = "^2.2.2", optional = true} - -[tool.poetry.group.dev.dependencies] -pre-commit = ">=3.7.1" -ipykernel = "^6.29.4" -nbconvert = "^7.16.4" -pytest = "^8.2.1" -pytest-xdist = { version="^3.6.1", extras=["psutil"]} -pytest-cov = ">=5.0.0" -pytest-asyncio = "^0.23.7" -snoop = "^0.4.3" -mypy = ">=1.10.0" -types-PyYAML = "^6.0.12.20240311" -ruff = "^0.5.2" - -[tool.poetry.group.unit-tests] -optional = true - -[tool.poetry.group.unit-tests.dependencies] 
-azure-ai-inference = {version = "^1.0.0b1", allow-prereleases = true} -azure-search-documents = {version = "11.6.0b4", allow-prereleases = true} -azure-core = "^1.28.0" -azure-cosmos = "^4.7.0" -mistralai = "^0.4.1" -ollama = "^0.2.1" -google-cloud-aiplatform = "^1.60.0" -anthropic = "^0.32.0" -google-generativeai = "^0.7.2" -transformers = { version = "^4.28.1", extras=['torch']} -sentence-transformers = { version = "^2.2.2"} -torch = {version = "2.2.2"} -# qdrant -qdrant-client = '^1.9' -# redis -redis = { version = "^5.0.7", extras=['hiredis']} -pandas = {version = "^2.2.2"} - -[tool.poetry.group.tests] -optional = true - -[tool.poetry.group.tests.dependencies] -# azure -azure-ai-inference = {version = "^1.0.0b1", allow-prereleases = true} -azure-search-documents = {version = "11.6.0b4", allow-prereleases = true} -azure-core = "^1.28.0" -azure-identity = "^1.13.0" -azure-cosmos = "^4.7.0" -msgraph-sdk = "^1.2.0" -# chroma -chromadb = ">=0.4.13,<0.6.0" -# google -google-cloud-aiplatform = "^1.60.0" -google-generativeai = "^0.7.2" -# hugging face -transformers = { version = "^4.28.1", extras=['torch']} -sentence-transformers = { version = "^2.2.2"} -torch = {version = "2.2.2"} -# milvus -pymilvus = ">=2.3,<2.4.6" -milvus = { version = ">=2.3,<2.3.8", markers = 'sys_platform != "win32"'} -# mistralai -mistralai = "^0.4.1" -# ollama -ollama = "^0.2.1" -# anthropic -anthropic = "^0.32.0" -# mongodb -motor = "^3.3.2" -# pinecone -pinecone-client = "^5.0.0" -# postgres -psycopg = { version="^3.1.9", extras=["binary","pool"]} -# qdrant -qdrant-client = '^1.9' -# redis -redis = { version="^5.0.7", extras=['hiredis']} -types-redis = { version="^4.6.0.20240425" } -# usearch -usearch = "^2.9" -pyarrow = ">=12.0.1,<18.0.0" -# weaviate -weaviate-client = ">=3.18,<5.0" -pandas = {version = "^2.2.2"} - -# Extras are exposed to pip, this allows a user to easily add the right dependencies to their environment -[tool.poetry.extras] -all = ["transformers", "sentence-transformers", 
"torch", "qdrant-client", "chromadb", "pymilvus", "milvus", "mistralai", "ollama", "anthropic", "google", "weaviate-client", "pinecone-client", "psycopg", "redis", "azure-ai-inference", "azure-search-documents", "azure-core", "azure-identity", "azure-cosmos", "usearch", "pyarrow", "ipykernel", "motor"] - -azure = ["azure-ai-inference", "azure-search-documents", "azure-core", "azure-identity", "azure-cosmos", "msgraph-sdk"] -chromadb = ["chromadb"] -google = ["google-cloud-aiplatform", "google-generativeai"] -hugging_face = ["transformers", "sentence-transformers", "torch"] -milvus = ["pymilvus", "milvus"] -mistralai = ["mistralai"] -ollama = ["ollama"] -anthropic = ["anthropic"] -mongo = ["motor"] -notebooks = ["ipykernel"] -pinecone = ["pinecone-client"] -postgres = ["psycopg"] -qdrant = ["qdrant-client"] -redis = ["redis", "types-redis"] -usearch = ["usearch", "pyarrow"] -weaviate = ["weaviate-client"] +[project.optional-dependencies] +azure = [ + "azure-ai-inference >= 1.0.0b3", + "azure-search-documents >= 11.6.0b4", + "azure-identity ~= 1.13", + "azure-cosmos ~= 4.7" +] +chroma = [ + "chromadb >= 0.4,<0.6" +] +google = [ + "google-cloud-aiplatform ~= 1.60", + "google-generativeai ~= 0.7" +] +hugging_face = [ + "transformers[torch] ~= 4.28", + "sentence-transformers >= 2.2,< 4.0", + "torch == 2.4.1" +] +mongo = [ + "motor >= 3.3.2,< 3.7.0" +] +notebooks = [ + "ipykernel ~= 6.29" +] +milvus = [ + "pymilvus >= 2.3,< 2.5", + "milvus >= 2.3,<2.3.8; platform_system != 'Windows'" +] +mistralai = [ + "mistralai >= 0.4,< 2.0" +] +ollama = [ + "ollama ~= 0.2" +] +anthropic = [ + "anthropic ~= 0.32" +] +pinecone = [ + "pinecone-client ~= 5.0" +] +postgres = [ + "psycopg[binary,pool] ~= 3.2" +] +qdrant = [ + "qdrant-client ~= 1.9" +] +redis = [ + "redis[hiredis] ~= 5.0", + "types-redis ~= 4.6.0.20240425" +] +usearch = [ + "usearch ~= 2.9", + "pyarrow >= 12.0,<18.0" +] +weaviate = [ + "weaviate-client >= 3.18,<5.0" +] +pandas = [ + "pandas ~= 2.2" +] + +[tool.uv] 
+prerelease = "if-necessary-or-explicit" +dev-dependencies = [ + "pre-commit ~= 3.7", + "ipykernel ~= 6.29", + "nbconvert ~= 7.16", + "pytest ~= 8.2", + "pytest-xdist[psutil] ~= 3.6", + "pytest-cov >= 5.0", + "pytest-asyncio ~= 0.23", + "snoop ~= 0.4", + "mypy >= 1.10", + "types-PyYAML ~= 6.0.12.20240311", + "ruff ~= 0.5" +] +environments = [ + "sys_platform == 'darwin'", + "sys_platform == 'linux'", + "sys_platform == 'win32'" +] [tool.pytest.ini_options] addopts = "-ra -q -r fEX" @@ -198,11 +143,11 @@ addopts = "-ra -q -r fEX" line-length = 120 target-version = "py310" include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"] +preview = true [tool.ruff.lint] fixable = ["ALL"] unfixable = [] -preview = true select = [ "D", #pydocstyle checks "E", #error checks @@ -227,7 +172,6 @@ ignore = [ ] [tool.ruff.format] -preview = true docstring-code-format = true [tool.ruff.lint.pydocstyle] @@ -246,10 +190,13 @@ notice-rgx = "^# Copyright \\(c\\) Microsoft\\. All rights reserved\\." min-file-size = 1 [tool.bandit] -targets = ["python/semantic_kernel"] -exclude_dirs = ["python/tests"] +targets = ["semantic_kernel"] +exclude_dirs = ["tests"] + +[tool.flit.module] +name = "semantic_kernel" [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["flit-core >= 3.9,<4.0"] +build-backend = "flit_core.buildapi" diff --git a/python/samples/README.md b/python/samples/README.md new file mode 100644 index 000000000000..3062daa353f7 --- /dev/null +++ b/python/samples/README.md @@ -0,0 +1,9 @@ +## Semantic Kernel Samples + +| Type | Description | +| ------------------------------------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------- | +| [`getting_started`](./getting_started/CONFIGURING_THE_KERNEL.md) | Take this step by step tutorial to get started with Semantic Kernel and get introduced to the key concepts. 
| +| [`getting_started_with_agents`](./getting_started_with_agents/README.md) | Take this step by step tutorial to get started with Semantic Kernel Agents and get introduced to the key concepts. | +| [`concepts`](./concepts/README.md) | This section contains focused samples which illustrate all of the concepts included in Semantic Kernel. | +| [`demos`](./demos/README.md) | Look here to find a sample which demonstrate how to use many of Semantic Kernel features. | +| [`learn_resources`](./learn_resources/README.md) | Code snippets that are related to online documentation sources like Microsoft Learn, DevBlogs and others | diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index 0ef6120ad285..507f2dee5650 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md @@ -22,6 +22,7 @@ This section contains code snippets that demonstrate the usage of Semantic Kerne | Search | Using search services information | | Service Selector | Shows how to create and use a custom service selector class. | | Setup | How to setup environment variables for Semantic Kernel | +| Structured Output | How to leverage OpenAI's json_schema structured output functionality. | | TextGeneration | Using [`TextGeneration`](https://github.com/microsoft/semantic-kernel/blob/main/python/semantic_kernel/connectors/ai/text_completion_client_base.py) capable service with models | # Configuring the Kernel @@ -40,6 +41,12 @@ In Semantic Kernel for Python, we leverage Pydantic Settings to manage configura 3. **Direct Constructor Input:** - As an alternative to environment variables and `.env` files, you can pass the required settings directly through the constructor of the AI Connector or Memory Connector. +## Microsoft Entra Token Authentication + +To authenticate to your Azure resources using a Microsoft Entra Authentication Token, the `AzureChatCompletion` AI Service connector now supports this as a built-in feature. 
If you do not provide an API key -- either through an environment variable, a `.env` file, or the constructor -- and you also do not provide a custom `AsyncAzureOpenAI` client, an `ad_token`, or an `ad_token_provider`, the `AzureChatCompletion` connector will attempt to retrieve a token using the [`DefaultAzureCredential`](https://learn.microsoft.com/en-us/python/api/azure-identity/azure.identity.defaultazurecredential?view=azure-python). + +To successfully retrieve and use the Entra Auth Token, you need the `Cognitive Services OpenAI Contributor` role assigned to your Azure OpenAI resource. By default, the `https://cognitiveservices.azure.com` token endpoint is used. You can override this endpoint by setting an environment variable `.env` variable as `AZURE_OPENAI_TOKEN_ENDPOINT` or by passing a new value to the `AzureChatCompletion` constructor as part of the `AzureOpenAISettings`. + ## Best Practices - **.env File Placement:** We highly recommend placing the `.env` file in the `semantic-kernel/python` root directory. This is a common practice when developing in the Semantic Kernel repository. diff --git a/python/samples/concepts/agents/README.md b/python/samples/concepts/agents/README.md new file mode 100644 index 000000000000..1260395f88f2 --- /dev/null +++ b/python/samples/concepts/agents/README.md @@ -0,0 +1,37 @@ +# Semantic Kernel: Agent concept examples + +This project contains a step by step guide to get started with _Semantic Kernel Agents_ in Python. + +#### PyPI: +- For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. +- For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. +- For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. 
+- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0 + +#### Source + +- [Semantic Kernel Agent Framework](../../../semantic_kernel/agents/) + +## Examples + +The concept agents examples are grouped by prefix: + +Prefix|Description +---|--- +assistant|How to use agents based on the [Open AI Assistant API](https://platform.openai.com/docs/assistants). +chat_completion|How to use Semantic Kernel Chat Completion agents. +mixed_chat|How to combine different agent types. +complex_chat|**Coming Soon** + +*Note: As we strive for parity with .NET, more getting_started_with_agent samples will be added. The current steps and names may be revised to further align with our .NET counterpart.* + +## Configuring the Kernel + +Similar to the Semantic Kernel Python concept samples, it is necessary to configure the secrets +and keys used by the kernel. See the follow "Configuring the Kernel" [guide](../README.md#configuring-the-kernel) for +more information. + +## Running Concept Samples + +Concept samples can be run in an IDE or via the command line. After setting up the required api key or token authentication +for your AI connector, the samples run without any extra command line arguments. 
diff --git a/python/samples/concepts/agents/assistant_agent_chart_maker.py b/python/samples/concepts/agents/assistant_agent_chart_maker.py index 95148734d39b..34cfd77f40c0 100644 --- a/python/samples/concepts/agents/assistant_agent_chart_maker.py +++ b/python/samples/concepts/agents/assistant_agent_chart_maker.py @@ -4,6 +4,7 @@ from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel @@ -19,6 +20,8 @@ # Note: you may toggle this to switch between AzureOpenAI and OpenAI use_azure_openai = True +streaming = True + # A helper method to invoke the agent with the user input async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: @@ -27,14 +30,29 @@ async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) print(f"# {AuthorRole.USER}: '{input}'") - async for message in agent.invoke(thread_id=thread_id): - if message.content: - print(f"# {message.role}: {message.content}") - - if len(message.items) > 0: - for item in message.items: - if isinstance(item, FileReferenceContent): - print(f"\n`{message.role}` => {item.file_id}") + if streaming: + first_chunk = True + async for message in agent.invoke_stream(thread_id=thread_id): + if message.content: + if first_chunk: + print(f"# {message.role}: ", end="", flush=True) + first_chunk = False + print(message.content, end="", flush=True) + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, StreamingFileReferenceContent): + print(f"\n# {message.role} => {item.file_id}") + print() + else: + async for message in agent.invoke(thread_id=thread_id): + if 
message.content: + print(f"# {message.role}: {message.content}") + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, FileReferenceContent): + print(f"\n`{message.role}` => {item.file_id}") async def main(): diff --git a/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py b/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py new file mode 100644 index 000000000000..ce985c009758 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_file_manipulation_streaming.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +import os + +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and leverage the # +# assistant's ability to stream the response and have the code # +# interpreter work with uploaded files # +##################################################################### + +AGENT_NAME = "FileManipulation" +AGENT_INSTRUCTIONS = "Find answers to the user's questions in the provided file." 
+ + +# A helper method to invoke the agent with the user input +async def invoke_streaming_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the streaming agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + elif content.role == AuthorRole.TOOL and content.metadata.get("code"): + print("") + print(f"# {content.role} (code):\n\n{content.content}") + print() + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Get the path to the sales.csv file + csv_file_path = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", + "agent_assistant_file_manipulation", + "sales.csv", + ) + + # Create the assistant agent + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + code_interpreter_filenames=[csv_file_path], + ) + + # Create a thread and specify the file to use for code interpretation + thread_id = await agent.create_thread() + + try: + await invoke_streaming_agent(agent, thread_id=thread_id, input="Which segment had the most sales?") + await invoke_streaming_agent( + agent, thread_id=thread_id, input="List the top 5 countries that generated the most profit." 
+ ) + await invoke_streaming_agent( + agent, + thread_id=thread_id, + input="Create a tab delimited file report of profit by each country per month.", + ) + finally: + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_retrieval.py b/python/samples/concepts/agents/assistant_agent_retrieval.py new file mode 100644 index 000000000000..02a5e4a02094 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_retrieval.py @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI and retrieve the # +# assistant using the `retrieve` class method. # +##################################################################### + +AGENT_NAME = "JokeTeller" +AGENT_INSTRUCTIONS = "You are a funny comedian who loves telling G-rated jokes." 
+ +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, input: str) -> None: + """Invoke the agent with the user input.""" + await agent.add_chat_message(thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=input)) + + print(f"# {AuthorRole.USER}: '{input}'") + + async for message in agent.invoke(thread_id=thread_id): + if message.content: + print(f"# {message.role}: {message.content}") + + if len(message.items) > 0: + for item in message.items: + if isinstance(item, FileReferenceContent): + print(f"\n`{message.role}` => {item.file_id}") + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Define a service_id for the sample + service_id = "agent" + + # Create the agent configuration + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, + service_id=service_id, + name=AGENT_NAME, + instructions=AGENT_INSTRUCTIONS, + enable_code_interpreter=True, + ) + + assistant_id = agent.assistant.id + + # Retrieve the agent using the assistant_id + retrieved_agent: OpenAIAssistantAgent = await OpenAIAssistantAgent.retrieve( + id=assistant_id, + kernel=kernel, + ) + + # Define a thread and invoke the agent with the user input + thread_id = await retrieved_agent.create_thread() + + try: + await invoke_agent(retrieved_agent, thread_id, "Tell me a joke about bears.") + finally: + await agent.delete() + await retrieved_agent.delete_thread(thread_id) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/assistant_agent_streaming.py b/python/samples/concepts/agents/assistant_agent_streaming.py new file 
mode 100644 index 000000000000..64439ba4e7c1 --- /dev/null +++ b/python/samples/concepts/agents/assistant_agent_streaming.py @@ -0,0 +1,110 @@ +# Copyright (c) Microsoft. All rights reserved. +import asyncio +from typing import Annotated + +from semantic_kernel.agents.open_ai import AzureAssistantAgent, OpenAIAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI. OpenAI Assistants # +# allow for function calling, the use of file search and a # +# code interpreter. Assistant Threads are used to manage the # +# conversation state, similar to a Semantic Kernel Chat History. # +# This sample also demonstrates the Assistants Streaming # +# capability and how to manage an Assistants chat history. # +##################################################################### + +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." 
+ +# Note: you may toggle this to switch between AzureOpenAI and OpenAI +use_azure_openai = True + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +# A helper method to invoke the agent with the user input +async def invoke_agent( + agent: OpenAIAssistantAgent, thread_id: str, input: str, history: list[ChatMessageContent] +) -> None: + """Invoke the agent with the user input.""" + message = ChatMessageContent(role=AuthorRole.USER, content=input) + await agent.add_chat_message(thread_id=thread_id, message=message) + + # Add the user message to the history + history.append(message) + + print(f"# {AuthorRole.USER}: '{input}'") + + first_chunk = True + async for content in agent.invoke_stream(thread_id=thread_id, messages=history): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + + +async def main(): + # Create the instance of the Kernel + kernel = Kernel() + + # Add the sample plugin to the kernel + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + + # Create the OpenAI Assistant Agent + service_id = "agent" + if use_azure_openai: + agent = await AzureAssistantAgent.create( + kernel=kernel, service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + else: + agent = await OpenAIAssistantAgent.create( + kernel=kernel, 
service_id=service_id, name=HOST_NAME, instructions=HOST_INSTRUCTIONS + ) + + thread_id = await agent.create_thread() + + history: list[ChatMessageContent] = [] + + try: + await invoke_agent(agent, thread_id=thread_id, input="Hello", history=history) + await invoke_agent(agent, thread_id=thread_id, input="What is the special soup?", history=history) + await invoke_agent(agent, thread_id=thread_id, input="What is the special drink?", history=history) + await invoke_agent(agent, thread_id=thread_id, input="Thank you", history=history) + finally: + await agent.delete_thread(thread_id) + await agent.delete() + + # You may then view the conversation history + print("========= Conversation History =========") + for content in history: + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") + print("========= End of Conversation History =========") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_function_termination.py new file mode 100644 index 000000000000..38ee6e76d832 --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_function_termination.py @@ -0,0 +1,133 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import Annotated + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( + AutoFunctionInvocationContext, +) +from semantic_kernel.filters.filter_types import FilterTypes +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to configure the auto # +# function invocation filter with use of a ChatCompletionAgent. # +################################################################### + + +# Define the agent name and instructions +HOST_NAME = "Host" +HOST_INSTRUCTIONS = "Answer questions about the menu." 
+ + +# Define the auto function invocation filter that will be used by the kernel +async def auto_function_invocation_filter(context: AutoFunctionInvocationContext, next): + """A filter that will be called for each function call in the response.""" + # if we don't call next, it will skip this function, and go to the next one + await next(context) + if context.function.plugin_name == "menu": + context.terminate = True + + +# Define a sample plugin for the sample +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +def _create_kernel_with_chat_completionand_filter(service_id: str) -> Kernel: + """A helper function to create a kernel with a chat completion service and a filter.""" + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + kernel.add_filter(FilterTypes.AUTO_FUNCTION_INVOCATION, auto_function_invocation_filter) + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + return kernel + + +def _write_content(content: ChatMessageContent) -> None: + """Write the content to the console.""" + last_item_type = type(content.items[-1]).__name__ if content.items else "(empty)" + message_content = "" + if isinstance(last_item_type, FunctionCallContent): + message_content = f"tool request = {content.items[-1].function_name}" + elif isinstance(last_item_type, FunctionResultContent): + message_content = f"function result = {content.items[-1].result}" + else: + message_content = str(content.items[-1]) + 
print(f"[{last_item_type}] {content.role} : '{message_content}'") + + +# A helper method to invoke the agent with the user input +async def invoke_agent(agent: ChatCompletionAgent, input: str, chat_history: ChatHistory) -> None: + """Invoke the agent with the user input.""" + chat_history.add_user_message(input) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in agent.invoke(chat_history): + if not any(isinstance(item, (FunctionCallContent, FunctionResultContent)) for item in content.items): + chat_history.add_message(content) + _write_content(content) + + +async def main(): + service_id = "agent" + + # Create the kernel used by the chat completion agent + kernel = _create_kernel_with_chat_completionand_filter(service_id=service_id) + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) + + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + # Create the agent + agent = ChatCompletionAgent( + service_id=service_id, + kernel=kernel, + name=HOST_NAME, + instructions=HOST_INSTRUCTIONS, + execution_settings=settings, + ) + + # Define the chat history + chat = ChatHistory() + + # Respond to user input + await invoke_agent(agent=agent, input="Hello", chat_history=chat) + await invoke_agent(agent=agent, input="What is the special soup?", chat_history=chat) + await invoke_agent(agent=agent, input="What is the special drink?", chat_history=chat) + await invoke_agent(agent=agent, input="Thank you", chat_history=chat) + + print("================================") + print("CHAT HISTORY") + print("================================") + + # Print out the chat history to view the different types of messages + for message in chat.messages: + _write_content(message) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_agents_plugins.py 
b/python/samples/concepts/agents/mixed_chat_agents_plugins.py new file mode 100644 index 000000000000..6df7f88cac43 --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat_agents_plugins.py @@ -0,0 +1,118 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +from typing import Annotated + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat to work towards # +# the user's requirement. The ChatCompletionAgent uses a plugin # +# that is part of the agent group chat. # +##################################################################### + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. 
Only include the word "approved" if it is so. +If not, provide insight on how to refine suggested copy without example. +You should always tie the conversation back to the food specials offered by the plugin. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +class MenuPlugin: + """A sample Menu Plugin used for the concept sample.""" + + @kernel_function(description="Provides a list of specials from the menu.") + def get_specials(self) -> Annotated[str, "Returns the specials from the menu."]: + return """ + Special Soup: Clam Chowder + Special Salad: Cobb Salad + Special Drink: Chai Tea + """ + + @kernel_function(description="Provides the price of the requested menu item.") + def get_item_price( + self, menu_item: Annotated[str, "The name of the menu item."] + ) -> Annotated[str, "Returns the price of the menu item."]: + return "$9.99" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + return kernel + + +async def main(): + try: + kernel = _create_kernel_with_chat_completion("artdirector") + settings = kernel.get_prompt_execution_settings_from_service_id(service_id="artdirector") + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=kernel, + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + execution_settings=settings, + ) + + agent_writer = await 
OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "Write copy based on the food specials." + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + async for content in chat.invoke(): + print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + await agent_writer.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/mixed_chat_files.py b/python/samples/concepts/agents/mixed_chat_files.py index b97cce8dd593..b5d21c3fd09f 100644 --- a/python/samples/concepts/agents/mixed_chat_files.py +++ b/python/samples/concepts/agents/mixed_chat_files.py @@ -5,8 +5,8 @@ from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent from semantic_kernel.agents.open_ai import OpenAIAssistantAgent -from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.annotation_content import AnnotationContent from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel @@ -19,21 +19,12 @@ ##################################################################### -class ApprovalTerminationStrategy(TerminationStrategy): - """A strategy for determining when an agent should terminate.""" - - async def should_agent_terminate(self, agent, history): - """Check if the agent should terminate.""" - return "approved" in 
history[-1].content.lower() - - SUMMARY_INSTRUCTIONS = "Summarize the entire conversation for the user in natural language." def _create_kernel_with_chat_completion(service_id: str) -> Kernel: kernel = Kernel() kernel.add_service(AzureChatCompletion(service_id=service_id)) - # kernel.add_service(OpenAIChatCompletion(service_id=service_id)) return kernel @@ -47,6 +38,12 @@ async def invoke_agent( async for content in chat.invoke(agent=agent): print(f"# {content.role} - {content.name or '*'}: '{content.content}'") + if len(content.items) > 0: + for item in content.items: + if isinstance(item, AnnotationContent): + print(f"\n`{item.quote}` => {item.file_id}") + response_content = await agent.client.files.content(item.file_id) + print(response_content.text) async def main(): diff --git a/python/samples/concepts/agents/mixed_chat_streaming.py b/python/samples/concepts/agents/mixed_chat_streaming.py new file mode 100644 index 000000000000..3aac54f3eb45 --- /dev/null +++ b/python/samples/concepts/agents/mixed_chat_streaming.py @@ -0,0 +1,95 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.open_ai import OpenAIAssistantAgent +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +##################################################################### +# The following sample demonstrates how to create an OpenAI # +# assistant using either Azure OpenAI or OpenAI, a chat completion # +# agent and have them participate in a group chat to work towards # +# the user's requirement. 
# +##################################################################### + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "ArtDirector" +REVIEWER_INSTRUCTIONS = """ +You are an art director who has opinions about copywriting born of a love for David Ogilvy. +The goal is to determine if the given copy is acceptable to print. +If so, state that it is approved. Only include the word "approved" if it is so. +If not, provide insight on how to refine suggested copy without example. +""" + +COPYWRITER_NAME = "CopyWriter" +COPYWRITER_INSTRUCTIONS = """ +You are a copywriter with ten years of experience and are known for brevity and a dry humor. +The goal is to refine and decide on the single best copy as an expert in the field. +Only provide a single proposal per response. +You're laser focused on the goal at hand. +Don't waste time with chit chat. +Consider suggestions when refining an idea. +""" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + try: + agent_reviewer = ChatCompletionAgent( + service_id="artdirector", + kernel=_create_kernel_with_chat_completion("artdirector"), + name=REVIEWER_NAME, + instructions=REVIEWER_INSTRUCTIONS, + ) + + agent_writer = await OpenAIAssistantAgent.create( + service_id="copywriter", + kernel=Kernel(), + name=COPYWRITER_NAME, + instructions=COPYWRITER_INSTRUCTIONS, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + termination_strategy=ApprovalTerminationStrategy(agents=[agent_reviewer], maximum_iterations=10), + ) + + input = "a slogan for a new line of electric cars." 
+ + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=input)) + print(f"# {AuthorRole.USER}: '{input}'") + + last_agent = None + async for message in chat.invoke_stream(): + if message.content is not None: + if last_agent != message.name: + print(f"\n# {message.name}: ", end="", flush=True) + last_agent = message.name + print(f"{message.content}", end="", flush=True) + + print() + print(f"# IS COMPLETE: {chat.is_complete}") + finally: + await agent_writer.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/auto_function_calling/anthropic_api_function_calling.py b/python/samples/concepts/auto_function_calling/anthropic_api_function_calling.py new file mode 100644 index 000000000000..a38ba2187ab8 --- /dev/null +++ b/python/samples/concepts/auto_function_calling/anthropic_api_function_calling.py @@ -0,0 +1,203 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os +from functools import reduce +from typing import TYPE_CHECKING + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.anthropic import AnthropicChatCompletion, AnthropicChatPromptExecutionSettings +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.functions import KernelArguments + +if TYPE_CHECKING: + from semantic_kernel.functions import KernelFunction + + +system_message = """ +You are a chat bot. Your name is Mosscap and +you have one goal: figure out what people need. 
+Your full name, should you need to know it, is +Splendid Speckled Mosscap. You communicate +effectively, but you tend to answer with long +flowery prose. You are also a math wizard, +especially for adding and subtracting. +You also excel at joke telling, where your tone is often sarcastic. +Once you have the answer I am looking for, +you will return a full answer to me as soon as possible. +""" + +# This concept example shows how to handle both streaming and non-streaming responses +# To toggle the behavior, set the following flag accordingly: +stream = False + +kernel = Kernel() + +# Note: the underlying model needs to support function calling. +# https://docs.anthropic.com/en/docs/build-with-claude/tool-use#choosing-a-model +kernel.add_service(AnthropicChatCompletion(service_id="chat", ai_model_id="claude-3-opus-20240229")) + +plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") +# adding plugins to the kernel +kernel.add_plugin(MathPlugin(), plugin_name="math") +kernel.add_plugin(TimePlugin(), plugin_name="time") + +chat_function = kernel.add_function( + prompt="{{$chat_history}}{{$user_input}}", + plugin_name="ChatBot", + function_name="Chat", +) + +# Enabling or disabling function calling is done by setting the `function_choice_behavior` attribute for the +# prompt execution settings. When the function_call parameter is set to "auto" the model will decide which +# function to use, if any. +# +# There are two ways to define the `function_choice_behavior` parameter: +# 1. Using the type string as `"auto"` or `"required"`. For example: +# configure `function_choice_behavior="auto"` parameter directly in the execution settings. +# 2. Using the FunctionChoiceBehavior class. For example: +# `function_choice_behavior=FunctionChoiceBehavior.Auto()`. +# Both of these configure the `auto` tool_choice and all of the available plugins/functions +# registered on the kernel. 
If you want to limit the available plugins/functions, you must +# configure the `filters` dictionary attribute for each type of function choice behavior. +# For example: +# +# from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior + +# function_choice_behavior = FunctionChoiceBehavior.Auto( +# filters={"included_functions": ["time-date", "time-time", "math-Add"]} +# ) +# +# The filters attribute allows you to specify either: `included_functions`, `excluded_functions`, +# `included_plugins`, or `excluded_plugins`. + +execution_settings = AnthropicChatPromptExecutionSettings( + service_id="chat", + max_tokens=2000, + temperature=0.7, + top_p=0.8, + function_choice_behavior=FunctionChoiceBehavior.Auto(auto_invoke=True), +) + +history = ChatHistory() + +history.add_system_message(system_message) +history.add_user_message("Hi there, who are you?") +history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") + +arguments = KernelArguments(settings=execution_settings) + + +def print_tool_calls(message: ChatMessageContent) -> None: + # A helper method to pretty print the tool calls from the message. + # This is only triggered if auto invoke tool calls is disabled. 
+ items = message.items + formatted_tool_calls = [] + for i, item in enumerate(items, start=1): + if isinstance(item, FunctionCallContent): + tool_call_id = item.id + function_name = item.name + function_arguments = item.arguments + formatted_str = ( + f"tool_call {i} id: {tool_call_id}\n" + f"tool_call {i} function name: {function_name}\n" + f"tool_call {i} arguments: {function_arguments}" + ) + formatted_tool_calls.append(formatted_str) + if len(formatted_tool_calls) > 0: + print("Tool calls:\n" + "\n\n".join(formatted_tool_calls)) + else: + print("The model used its own knowledge and didn't return any tool calls.") + + +async def handle_streaming( + kernel: Kernel, + chat_function: "KernelFunction", + arguments: KernelArguments, +) -> str | None: + response = kernel.invoke_stream( + chat_function, + return_function_results=False, + arguments=arguments, + ) + + print("Mosscap:> ", end="") + streamed_chunks: list[StreamingChatMessageContent] = [] + result_content = [] + async for message in response: + if not execution_settings.function_choice_behavior.auto_invoke_kernel_functions and isinstance( + message[0], StreamingChatMessageContent + ): + streamed_chunks.append(message[0]) + else: + result_content.append(message[0]) + print(str(message[0]), end="") + + if streamed_chunks: + streaming_chat_message = reduce(lambda first, second: first + second, streamed_chunks) + if hasattr(streaming_chat_message, "content"): + print(streaming_chat_message.content) + print("Auto tool calls is disabled, printing returned tool calls...") + print_tool_calls(streaming_chat_message) + + print("\n") + if result_content: + return "".join([str(content) for content in result_content]) + return None + + +async def chat() -> bool: + try: + user_input = input("User:> ") + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + 
arguments["user_input"] = user_input + arguments["chat_history"] = history + + if stream: + result = await handle_streaming(kernel, chat_function, arguments=arguments) + else: + result = await kernel.invoke(chat_function, arguments=arguments) + + # If tools are used, and auto invoke tool calls is False, the response will be of type + # ChatMessageContent with information about the tool calls, which need to be sent + # back to the model to get the final response. + function_calls = [item for item in result.value[-1].items if isinstance(item, FunctionCallContent)] + if not execution_settings.function_choice_behavior.auto_invoke_kernel_functions and len(function_calls) > 0: + print_tool_calls(result.value[0]) + return True + + print(f"Mosscap:> {result}") + + history.add_user_message(user_input) + history.add_assistant_message(str(result)) + return True + + +async def main() -> None: + chatting = True + print( + "Welcome to the chat bot!\ + \n Type 'exit' to exit.\ + \n Try a math question to see the function calling in action (i.e. what is 3+3?)." + ) + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/auto_function_calling/azure_python_code_interpreter_function_calling.py b/python/samples/concepts/auto_function_calling/azure_python_code_interpreter_function_calling.py index 09ab58f4b633..18ad9b17afc1 100644 --- a/python/samples/concepts/auto_function_calling/azure_python_code_interpreter_function_calling.py +++ b/python/samples/concepts/auto_function_calling/azure_python_code_interpreter_function_calling.py @@ -1,11 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. 
import asyncio -import datetime - -from azure.core.credentials import AccessToken -from azure.core.exceptions import ClientAuthenticationError -from azure.identity import DefaultAzureCredential from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( @@ -15,37 +10,9 @@ from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.core_plugins.sessions_python_tool.sessions_python_plugin import SessionsPythonTool from semantic_kernel.core_plugins.time_plugin import TimePlugin -from semantic_kernel.exceptions.function_exceptions import FunctionExecutionException from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.kernel import Kernel -auth_token: AccessToken | None = None - -ACA_TOKEN_ENDPOINT: str = "https://acasessions.io/.default" # nosec - - -async def auth_callback() -> str: - """Auth callback for the SessionsPythonTool. - This is a sample auth callback that shows how to use Azure's DefaultAzureCredential - to get an access token. 
- """ - global auth_token - current_utc_timestamp = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) - - if not auth_token or auth_token.expires_on < current_utc_timestamp: - credential = DefaultAzureCredential() - - try: - auth_token = credential.get_token(ACA_TOKEN_ENDPOINT) - except ClientAuthenticationError as cae: - err_messages = getattr(cae, "messages", []) - raise FunctionExecutionException( - f"Failed to retrieve the client auth token with messages: {' '.join(err_messages)}" - ) from cae - - return auth_token.token - - kernel = Kernel() service_id = "sessions-tool" @@ -54,9 +21,7 @@ async def auth_callback() -> str: ) kernel.add_service(chat_service) -sessions_tool = SessionsPythonTool( - auth_callback=auth_callback, -) +sessions_tool = SessionsPythonTool() kernel.add_plugin(sessions_tool, "SessionsTool") kernel.add_plugin(TimePlugin(), "Time") diff --git a/python/samples/concepts/auto_function_calling/chat_mistral_ai_api_function_calling.py b/python/samples/concepts/auto_function_calling/chat_mistral_ai_api_function_calling.py new file mode 100644 index 000000000000..489f527852aa --- /dev/null +++ b/python/samples/concepts/auto_function_calling/chat_mistral_ai_api_function_calling.py @@ -0,0 +1,212 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import os +from functools import reduce +from typing import TYPE_CHECKING + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.mistral_ai import MistralAIChatCompletion, MistralAIChatPromptExecutionSettings +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.core_plugins.time_plugin import TimePlugin +from semantic_kernel.functions import KernelArguments + +if TYPE_CHECKING: + from semantic_kernel.functions import KernelFunction + + +system_message = """ +You are a chat bot. Your name is Mosscap and +you have one goal: figure out what people need. +Your full name, should you need to know it, is +Splendid Speckled Mosscap. You communicate +effectively, but you tend to answer with long +flowery prose. You are also a math wizard, +especially for adding and subtracting. +You also excel at joke telling, where your tone is often sarcastic. +Once you have the answer I am looking for, +you will return a full answer to me as soon as possible. +""" + +# This concept example shows how to handle both streaming and non-streaming responses +# To toggle the behavior, set the following flag accordingly: +stream = True + +kernel = Kernel() + +# Note: the underlying Model must be Mistral Small, Mistral Large, Mixtral 8x22B, Mistral Nemo. +# You can use MISTRALAI_API_KEY and MISTRALAI_CHAT_MODEL_ID environment variables to set the API key and model ID. 
+# Or just set it here in the Constructor for testing +kernel.add_service( + MistralAIChatCompletion( + service_id="chat", + # api_key=XXXXXXX, + # ai_model_id="mistral-large", + ) +) + +plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/") +# adding plugins to the kernel +kernel.add_plugin(MathPlugin(), plugin_name="math") +kernel.add_plugin(TimePlugin(), plugin_name="time") + +chat_function = kernel.add_function( + prompt="{{$chat_history}}{{$user_input}}", + plugin_name="ChatBot", + function_name="Chat", +) + +# Enabling or disabling function calling is done by setting the `function_choice_behavior` attribute for the +# prompt execution settings. When the function_call parameter is set to "auto" the model will decide which +# function to use, if any. +# +# There are two ways to define the `function_choice_behavior` parameter: +# 1. Using the type string as `"auto"`, `"required"`, or `"none"`. For example: +# configure `function_choice_behavior="auto"` parameter directly in the execution settings. +# 2. Using the FunctionChoiceBehavior class. For example: +# `function_choice_behavior=FunctionChoiceBehavior.Auto()`. +# Both of these configure the `auto` tool_choice and all of the available plugins/functions +# registered on the kernel. If you want to limit the available plugins/functions, you must +# configure the `filters` dictionary attribute for each type of function choice behavior. +# For example: +# +# from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior + +# function_choice_behavior = FunctionChoiceBehavior.Auto( +# filters={"included_functions": ["time-date", "time-time", "math-Add"]} +# ) +# +# The filters attribute allows you to specify either: `included_functions`, `excluded_functions`, +# `included_plugins`, or `excluded_plugins`. + +# Note: the number of responses for auto invoking tool calls is limited to 1. 
+# If configured to be greater than one, this value will be overridden to 1. +execution_settings = MistralAIChatPromptExecutionSettings( + service_id="chat", + max_tokens=2000, + temperature=0.7, + top_p=0.8, + function_choice_behavior=FunctionChoiceBehavior.Auto(auto_invoke=True), +) + +history = ChatHistory() + +history.add_system_message(system_message) +history.add_user_message("Hi there, who are you?") +history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.") + +arguments = KernelArguments(settings=execution_settings) + + +def print_tool_calls(message: ChatMessageContent) -> None: + # A helper method to pretty print the tool calls from the message. + # This is only triggered if auto invoke tool calls is disabled. + items = message.items + formatted_tool_calls = [] + for i, item in enumerate(items, start=1): + if isinstance(item, FunctionCallContent): + tool_call_id = item.id + function_name = item.name + function_arguments = item.arguments + formatted_str = ( + f"tool_call {i} id: {tool_call_id}\n" + f"tool_call {i} function name: {function_name}\n" + f"tool_call {i} arguments: {function_arguments}" + ) + formatted_tool_calls.append(formatted_str) + if len(formatted_tool_calls) > 0: + print("Tool calls:\n" + "\n\n".join(formatted_tool_calls)) + else: + print("The model used its own knowledge and didn't return any tool calls.") + + +async def handle_streaming( + kernel: Kernel, + chat_function: "KernelFunction", + arguments: KernelArguments, +) -> str | None: + response = kernel.invoke_stream( + chat_function, + return_function_results=False, + arguments=arguments, + ) + + print("Mosscap:> ", end="") + streamed_chunks: list[StreamingChatMessageContent] = [] + result_content = [] + async for message in response: + if not execution_settings.function_choice_behavior.auto_invoke_kernel_functions and isinstance( + message[0], StreamingChatMessageContent + ): + streamed_chunks.append(message[0]) + else: + 
result_content.append(message[0]) + print(str(message[0]), end="") + + if streamed_chunks: + streaming_chat_message = reduce(lambda first, second: first + second, streamed_chunks) + if hasattr(streaming_chat_message, "content"): + print(streaming_chat_message.content) + print("Auto tool calls is disabled, printing returned tool calls...") + print_tool_calls(streaming_chat_message) + + print("\n") + if result_content: + return "".join([str(content) for content in result_content]) + return None + + +async def chat() -> bool: + try: + user_input = input("User:> ") + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + arguments["user_input"] = user_input + arguments["chat_history"] = history + + if stream: + result = await handle_streaming(kernel, chat_function, arguments=arguments) + else: + result = await kernel.invoke(chat_function, arguments=arguments) + + # If tools are used, and auto invoke tool calls is False, the response will be of type + # ChatMessageContent with information about the tool calls, which need to be sent + # back to the model to get the final response. + function_calls = [item for item in result.value[-1].items if isinstance(item, FunctionCallContent)] + if not execution_settings.function_choice_behavior.auto_invoke_kernel_functions and len(function_calls) > 0: + print_tool_calls(result.value[0]) + return True + + print(f"Mosscap:> {result}") + + history.add_user_message(user_input) + history.add_assistant_message(str(result)) + return True + + +async def main() -> None: + chatting = True + print( + "Welcome to the chat bot!\ + \n Type 'exit' to exit.\ + \n Try a math question to see the function calling in action (i.e. what is 3+3?)." 
+ ) + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/auto_function_calling/parallel_function_calling.py b/python/samples/concepts/auto_function_calling/parallel_function_calling.py new file mode 100644 index 000000000000..33e76b700584 --- /dev/null +++ b/python/samples/concepts/auto_function_calling/parallel_function_calling.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import logging +import sys +import time +from typing import Annotated + +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.kernel import Kernel + +# This sample demonstrates how the kernel will execute functions in parallel. +# The output of this sample should look similar to the following: +# +# [2024-09-11 10:15:35.070 INFO] processing 2 tool calls in parallel. +# The employee with ID 123 is named John Doe and they are 30 years old. +# Time elapsed: 11.96s +# +# The mock plugin simulates a long-running operation to fetch the employee's name and age. +# When you run the sample, you should see the total execution time is less than the sum +# of the two function calls because the kernel executes the functions in parallel. 
+
+# This concept example shows how to handle both streaming and non-streaming responses
+# To toggle the behavior, set the following flag accordingly:
+stream = True
+
+
+def set_up_logging():
+    """Set up logging to verify the kernel executes the functions in parallel"""
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.INFO)
+
+    handler = logging.StreamHandler(sys.stdout)
+    handler.setLevel(logging.INFO)
+    handler.setFormatter(
+        logging.Formatter("[%(asctime)s.%(msecs)03d %(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S"),
+    )
+    # Print only the logs from the chat completion client to reduce the output of the sample
+    handler.addFilter(lambda record: record.name == "semantic_kernel.connectors.ai.chat_completion_client_base")
+
+    root_logger.addHandler(handler)
+
+
+class EmployeePlugin:
+    """A mock plugin to simulate a plugin that fetches employee information"""
+
+    @kernel_function(name="get_name", description="Find the name of the employee by the id")
+    async def get_name(
+        self, id: Annotated[str, "The ID of the employee"]
+    ) -> Annotated[str, "The name of the employee"]:
+        # Simulate a long-running operation
+        await asyncio.sleep(10)
+        return "John Doe"
+
+    @kernel_function(name="get_age", description="Get the age of the employee by the id")
+    async def get_age(self, id: Annotated[str, "The ID of the employee"]) -> Annotated[int, "The age of the employee"]:
+        # Simulate a long-running operation
+        await asyncio.sleep(10)
+        return 30
+
+
+async def main():
+    kernel = Kernel()
+    kernel.add_service(OpenAIChatCompletion(service_id="open_ai"))
+    kernel.add_plugin(EmployeePlugin(), "EmployeePlugin")
+
+    # With this query, the model will call the get_name and get_age functions in parallel.
+    # Note that for certain queries, the model may choose to call the functions sequentially.
+ # For example, if the available functions are `get_email_by_id` and `get_name_by_email`, + # the model will not be able to call them in parallel because the second function depends + # on the result of the first function. + query = "What is the name and age of the employee of ID 123?" + arguments = KernelArguments( + settings=PromptExecutionSettings( + # Set the function_choice_behavior to auto to let the model + # decide which function to use, and let the kernel automatically + # execute the functions. + function_choice_behavior=FunctionChoiceBehavior.Auto(), + ) + ) + + start = time.perf_counter() + + if stream: + async for result in kernel.invoke_prompt_stream(query, arguments=arguments): + print(str(result[0]), end="") + print() + else: + result = await kernel.invoke_prompt(query, arguments=arguments) + print(result) + + print(f"Time elapsed: {time.perf_counter() - start:.2f}s") + + +if __name__ == "__main__": + set_up_logging() + + asyncio.run(main()) diff --git a/python/samples/concepts/chat_completion/chat_anthropic_api.py b/python/samples/concepts/chat_completion/chat_anthropic_api.py index 4cfb4e277b1f..b5caa1e891c1 100644 --- a/python/samples/concepts/chat_completion/chat_anthropic_api.py +++ b/python/samples/concepts/chat_completion/chat_anthropic_api.py @@ -21,6 +21,7 @@ kernel.add_service(AnthropicChatCompletion(service_id=service_id, ai_model_id="claude-3-opus-20240229")) settings = kernel.get_prompt_execution_settings_from_service_id(service_id) +settings.system = system_message settings.max_tokens = 2000 settings.temperature = 0.7 settings.top_p = 0.8 @@ -33,7 +34,7 @@ prompt_execution_settings=settings, ) -chat_history = ChatHistory(system_message=None) +chat_history = ChatHistory() chat_history.add_user_message("Hi there, who are you?") chat_history.add_assistant_message("I am Mosscap, a chat bot. 
I'm trying to figure out what people need") @@ -61,7 +62,7 @@ async def chat() -> bool: ) print("Mosscap:> ", end="") async for message in answer: - print(str(message[0]), end="") + print(str(message[0]), end="", flush=True) print("\n") return True answer = await kernel.invoke( diff --git a/python/samples/concepts/memory/new_memory.py b/python/samples/concepts/memory/new_memory.py index 7ab149fde00a..a76716659ad2 100644 --- a/python/samples/concepts/memory/new_memory.py +++ b/python/samples/concepts/memory/new_memory.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. - +import argparse +from collections.abc import Callable from dataclasses import dataclass, field from typing import Annotated from uuid import uuid4 @@ -9,7 +10,9 @@ from semantic_kernel import Kernel from semantic_kernel.connectors.ai.open_ai import OpenAIEmbeddingPromptExecutionSettings, OpenAITextEmbedding +from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding from semantic_kernel.connectors.memory.azure_ai_search import AzureAISearchCollection +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.connectors.memory.qdrant import QdrantCollection from semantic_kernel.connectors.memory.redis import RedisHashsetCollection, RedisJsonCollection from semantic_kernel.connectors.memory.volatile import VolatileCollection @@ -65,43 +68,58 @@ class MyDataModelList: ] = "content1" -# configuration -# specify which store (redis_json, redis_hash, qdrant, Azure AI Search or volatile) to use -# and which model (vectors as list or as numpy arrays) -store = "volatile" collection_name = "test" MyDataModel = MyDataModelArray -stores: dict[str, VectorStoreRecordCollection] = { - "ai_search": AzureAISearchCollection[MyDataModel]( +# A list of VectorStoreRecordCollection that can be used. 
+# Available stores are: +# - ai_search: Azure AI Search +# - postgres: PostgreSQL +# - redis_json: Redis JSON +# - redis_hashset: Redis Hashset +# - qdrant: Qdrant +# - volatile: In-memory store +# +# This is represented as a mapping from the store name to a +# function which returns the store. +# Using a function allows for lazy initialization of the store, +# so that settings for unused stores do not cause validation errors. +stores: dict[str, Callable[[], VectorStoreRecordCollection]] = { + "ai_search": lambda: AzureAISearchCollection[MyDataModel]( + data_model_type=MyDataModel, + ), + "postgres": lambda: PostgresCollection[str, MyDataModel]( data_model_type=MyDataModel, + collection_name=collection_name, ), - "redis_json": RedisJsonCollection[MyDataModel]( + "redis_json": lambda: RedisJsonCollection[MyDataModel]( data_model_type=MyDataModel, collection_name=collection_name, prefix_collection_name_to_key_names=True, ), - "redis_hashset": RedisHashsetCollection[MyDataModel]( + "redis_hashset": lambda: RedisHashsetCollection[MyDataModel]( data_model_type=MyDataModel, collection_name=collection_name, prefix_collection_name_to_key_names=True, ), - "qdrant": QdrantCollection[MyDataModel]( + "qdrant": lambda: QdrantCollection[MyDataModel]( data_model_type=MyDataModel, collection_name=collection_name, prefer_grpc=True, named_vectors=False ), - "volatile": VolatileCollection[MyDataModel]( + "volatile": lambda: VolatileCollection[MyDataModel]( data_model_type=MyDataModel, collection_name=collection_name, ), } -async def main(): +async def main(store: str, use_azure_openai: bool, embedding_model: str): kernel = Kernel() service_id = "embedding" - ai_model_id = "text-embedding-3-small" - kernel.add_service(OpenAITextEmbedding(service_id=service_id, ai_model_id=ai_model_id)) - async with stores[store] as record_store: + if use_azure_openai: + kernel.add_service(AzureTextEmbedding(service_id=service_id, deployment_name=embedding_model)) + else: + 
kernel.add_service(OpenAITextEmbedding(service_id=service_id, ai_model_id=embedding_model)) + async with stores[store]() as record_store: await record_store.create_collection_if_not_exists() record1 = MyDataModel(content="My text", id="e6103c03-487f-4d7d-9c23-4723651c17f4") @@ -127,4 +145,16 @@ async def main(): if __name__ == "__main__": import asyncio - asyncio.run(main()) + argparse.ArgumentParser() + + parser = argparse.ArgumentParser() + parser.add_argument("--store", default="volatile", choices=stores.keys(), help="What store to use.") + # Option of whether to use OpenAI or Azure OpenAI. + parser.add_argument("--use-azure-openai", action="store_true", help="Use Azure OpenAI instead of OpenAI.") + # Model + parser.add_argument( + "--model", default="text-embedding-3-small", help="The model or deployment to use for embeddings." + ) + args = parser.parse_args() + + asyncio.run(main(store=args.store, use_azure_openai=args.use_azure_openai, embedding_model=args.model)) diff --git a/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py b/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py index c98efa3a5a5c..6627a2a7fb26 100644 --- a/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py +++ b/python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py @@ -20,8 +20,8 @@ async def main(): ) plugin_path = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), - "resources", + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "resources", ) kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin") diff --git a/python/samples/concepts/plugins/openapi/README.md b/python/samples/concepts/plugins/openapi/README.md index 4688b77be5f7..e93ebe0dca91 100644 --- a/python/samples/concepts/plugins/openapi/README.md +++ b/python/samples/concepts/plugins/openapi/README.md @@ -1,8 +1,10 @@ ### Running the OpenAPI 
syntax example +For more generic setup instructions, including how to install the `uv` tool, see the [main README](../../../../DEV_SETUP.md). + 1. In a terminal, navigate to `semantic_kernel/python/samples/kernel-syntax-examples/openapi_example`. -2. Run `poetry install` followed by `poetry shell` to enter poetry's virtual environment. +2. Run `uv sync` followed by `source .venv/bin/activate` to enter the virtual environment (depending on the os, the activate script may be in a different location). 3. Start the server by running `python openapi_server.py`. diff --git a/python/samples/concepts/setup/ALL_SETTINGS.md b/python/samples/concepts/setup/ALL_SETTINGS.md index 2a0e9b6fb80e..ea9e1db6ff74 100644 --- a/python/samples/concepts/setup/ALL_SETTINGS.md +++ b/python/samples/concepts/setup/ALL_SETTINGS.md @@ -14,6 +14,10 @@ OpenAI | [OpenAIChatCompletion](../../../semantic_kernel/connectors/ai/open_ai/s | | | ai_model_id | OPENAI_EMBEDDING_MODEL_ID | Yes | | | api_key | OPENAI_API_KEY | Yes | | | org_id | OPENAI_ORG_ID | No +| | [OpenAITextToImage](../../../semantic_kernel/connectors/ai/open_ai/services/open_ai_text_to_image.py) +| | | ai_model_id | OPENAI_TEXT_TO_IMAGE_MODEL_ID | Yes +| | | api_key | OPENAI_API_KEY | Yes +| | | org_id | OPENAI_ORG_ID | No Azure OpenAI | [AzureOpenAIChatCompletion](../../../semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py) | | | | [AzureOpenAISettings](../../../semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py) | | | deployment_name | AZURE_OPENAI_CHAT_DEPLOYMENT_NAME | Yes | | | api_key | AZURE_OPENAI_API_KEY | Yes @@ -32,6 +36,10 @@ Azure OpenAI | [AzureOpenAIChatCompletion](../../../semantic_kernel/connectors/a | | | endpoint | AZURE_OPENAI_ENDPOINT | Yes | | | api_version | AZURE_OPENAI_API_VERSION | Yes | | | base_url | AZURE_OPENAI_BASE_URL | Yes +| | [AzureTextToImage](../../../semantic_kernel/connectors/ai/open_ai/services/azure_text_to_image.py) +| | | deployment_name | 
AZURE_OPENAI_TEXT_TO_IMAGE_DEPLOYMENT_NAME | Yes +| | | api_key | AZURE_OPENAI_API_KEY | Yes +| | | endpoint | AZURE_OPENAI_ENDPOINT | Yes ## Memory Service Settings used across SK: diff --git a/python/samples/concepts/structured_output/README.md b/python/samples/concepts/structured_output/README.md new file mode 100644 index 000000000000..50d20964f881 --- /dev/null +++ b/python/samples/concepts/structured_output/README.md @@ -0,0 +1,16 @@ +# OpenAI Structured Outputs + +## Supported Models + +### Azure OpenAI: + +- Access to `gpt-4o-2024-08-06` or later +- The `2024-08-01-preview` API version +- If using a token instead of an API key, you must have the `Cognitive Services OpenAI Contributor` role assigned to your Azure AD user. +- See more information [here](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/structured-outputs?tabs=python-secure) + +### OpenAI: + +- The OpenAI models supported are: + - `gpt-4o-mini-2024-07-18` and later + - `gpt-4o-2024-08-06` and later diff --git a/python/samples/concepts/structured_output/json_structured_output.py b/python/samples/concepts/structured_output/json_structured_output.py new file mode 100644 index 000000000000..f6ea600cd56f --- /dev/null +++ b/python/samples/concepts/structured_output/json_structured_output.py @@ -0,0 +1,135 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + +################################################################### +# The following sample demonstrates how to create a chat # +# completion call that assists users in solving math problems. # +# The bot guides the user step-by-step through the solution # +# process using a structured output format based on either a # +# Pydantic model or a non-Pydantic model. # +################################################################### + + +################################################################### +# NOTE: If using Azure OpenAI the the following is required: +# - access to gpt-4o-2024-08-06 +# - the 2024-08-01-preview API version +# - if using a token instead of an API KEY, you must have the +# `Cognitive Services OpenAI Contributor` role assigned to your +# Azure AD user. +# - flip the `use_azure_openai` flag to `True` +################################################################### +use_azure_openai = False + +system_message = """ +You are a helpful math tutor. Guide the user through the solution step by step. +""" + + +################################################################### +# OPTION 1: Define the Pydantic model that represents the +# structured output from the OpenAI service. This model will be +# used to parse the structured output from the OpenAI service, +# and ensure that the model correctly outputs the schema based +# on the Pydantic model. 
+from semantic_kernel.kernel_pydantic import KernelBaseModel  # noqa: E402
+
+
+class Step(KernelBaseModel):
+    explanation: str
+    output: str
+
+
+class Reasoning(KernelBaseModel):
+    steps: list[Step]
+    final_answer: str
+
+
+###################################################################
+
+
+# OPTION 2: Define a non-Pydantic model that should represent the
+# structured output from the OpenAI service. This model will be
+# converted to the proper JSON Schema and sent to the LLM.
+# Uncomment the following lines and comment out the Pydantic model
+# above to use this option.
+# class Step:
+#     explanation: str
+#     output: str
+
+
+# class Reasoning:
+#     steps: list[Step]
+#     final_answer: str
+
+
+###################################################################
+
+kernel = Kernel()
+
+service_id = "structured-output"
+if use_azure_openai:
+    chat_service = AzureChatCompletion(
+        service_id=service_id,
+    )
+else:
+    chat_service = OpenAIChatCompletion(
+        service_id=service_id,
+    )
+kernel.add_service(chat_service)
+
+req_settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id)
+req_settings.max_tokens = 2000
+req_settings.temperature = 0.7
+req_settings.top_p = 0.8
+req_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["chat"]})
+
+# NOTE: This is the key setting in this example that tells the OpenAI service
+# to return structured output based on the Pydantic model Reasoning.
+req_settings.response_format = Reasoning + + +chat_function = kernel.add_function( + prompt=system_message + """{{$chat_history}}""", + function_name="chat", + plugin_name="chat", + prompt_execution_settings=req_settings, +) + +history = ChatHistory() +history.add_user_message("how can I solve 8x + 7y = -23, and 4x=12?") + + +async def main(): + stream = True + if stream: + answer = kernel.invoke_stream( + chat_function, + chat_history=history, + ) + print("Mosscap:> ", end="") + result_content: list[StreamingChatMessageContent] = [] + async for message in answer: + result_content.append(message[0]) + print(str(message[0]), end="", flush=True) + if result_content: + result = "".join([str(content) for content in result_content]) + else: + result = await kernel.invoke( + chat_function, + chat_history=history, + ) + print(f"Mosscap:> {result}") + history.add_assistant_message(str(result)) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/structured_output/json_structured_output_function_calling.py b/python/samples/concepts/structured_output/json_structured_output_function_calling.py new file mode 100644 index 000000000000..d2091dbf5f73 --- /dev/null +++ b/python/samples/concepts/structured_output/json_structured_output_function_calling.py @@ -0,0 +1,163 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +from typing import Annotated + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.contents import ChatHistory +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +################################################################### +# The following sample demonstrates how to create a chat # +# completion call that assists users in solving a question # +# using a Semantic Kernel Plugin and function calling problems. # +# The chat plugin guides the user step-by-step through the # +# solution process using a structured output format based on # +# either a Pydantic model or a non-Pydantic model # +################################################################### + + +################################################################### +# NOTE: If using Azure OpenAI the the following is required: +# - access to gpt-4o-2024-08-06 +# - the 2024-08-01-preview API version +# - if using a token instead of an API KEY, you must have the +# `Cognitive Services OpenAI Contributor` role assigned to your +# Azure AD user. +# - flip the `use_azure_openai` flag to `True` +################################################################### +use_azure_openai = True + +system_message = """ +You are a helpful math tutor. Guide the user through the solution step by step. 
+""" + + +# Define a sample plugin to use for function calling +class WeatherPlugin: + """A sample plugin that provides weather information for cities.""" + + @kernel_function(name="get_weather_for_city", description="Get the weather for a city") + def get_weather_for_city(self, city: Annotated[str, "The input city"]) -> Annotated[str, "The output is a string"]: + if city == "Boston": + return "61 and rainy" + if city == "London": + return "55 and cloudy" + if city == "Miami": + return "80 and sunny" + if city == "Paris": + return "60 and rainy" + if city == "Tokyo": + return "50 and sunny" + if city == "Sydney": + return "75 and sunny" + if city == "Tel Aviv": + return "80 and sunny" + return "31 and snowing" + + +################################################################### +# OPTION 1: Define the Pydantic model that represents the +# structured output from the OpenAI service. This model will be +# used to parse the structured output from the OpenAI service, +# and ensure that the model correctly outputs the schema based +# on the Pydantic model. +from semantic_kernel.kernel_pydantic import KernelBaseModel # noqa: E402 + + +class Step(KernelBaseModel): + explanation: str + output: str + + +class Reasoning(KernelBaseModel): + steps: list[Step] + final_answer: str + + +################################################################### + + +# OPTION 2: Define a non-Pydantic model that should represent the +# structured output from the OpenAI service. This model will be +# converted to the proper JSON Schema and sent to the LLM. +# Uncomment the follow lines and comment out the Pydantic model +# above to use this option. 
+# class Step: +# explanation: str +# output: str + + +# class Reasoning: +# steps: list[Step] +# final_answer: str + + +################################################################### + +kernel = Kernel() + +service_id = "structured-output" +if use_azure_openai: + chat_service = AzureChatCompletion( + service_id=service_id, + ) +else: + chat_service = OpenAIChatCompletion( + service_id=service_id, + ) +kernel.add_service(chat_service) + +kernel.add_plugin(WeatherPlugin(), plugin_name="weather") + +req_settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) +req_settings.max_tokens = 2000 +req_settings.temperature = 0.7 +req_settings.top_p = 0.8 +req_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["chat"]}) + +# NOTE: This is the key setting in this example that tells the OpenAI service +# to return structured output based on the Pydantic model Reasoning. +req_settings.response_format = Reasoning + + +chat_function = kernel.add_function( + prompt=system_message + """{{$chat_history}}""", + function_name="chat", + plugin_name="chat", + prompt_execution_settings=req_settings, +) + +history = ChatHistory() +history.add_user_message("Using the available plugin, what is the weather in Paris?") + + +async def main(): + stream = True + if stream: + answer = kernel.invoke_stream( + chat_function, + chat_history=history, + ) + print("Mosscap:> ", end="") + result_content: list[StreamingChatMessageContent] = [] + async for message in answer: + result_content.append(message[0]) + print(str(message[0]), end="", flush=True) + if result_content: + result = "".join([str(content) for content in result_content]) + else: + result = await kernel.invoke( + chat_function, + chat_history=history, + ) + print(f"Mosscap:> {result}") + history.add_assistant_message(str(result)) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/demos/README.md b/python/samples/demos/README.md new 
file mode 100644 index 000000000000..1387b06b0d5a --- /dev/null +++ b/python/samples/demos/README.md @@ -0,0 +1,9 @@ +## Semantic Kernel Demo Applications + +Demonstration applications that leverage the usage of one or many SK features + +| Type | Description | +| ----------------- | ----------------------------------------------- | +| assistants_group_chat | A sample Agent demo that shows a chat functionality with an OpenAI Assistant agent. | +| booking_restaurant | A sample chat bot that leverages the Microsoft Graph and Bookings API as a Semantic Kernel plugin to make a fake booking at a restaurant. | +| telemetry_with_application_insights | A sample project that shows how a Python application can be configured to send Semantic Kernel telemetry to Application Insights. | \ No newline at end of file diff --git a/python/samples/demos/guided_conversations/README.md b/python/samples/demos/guided_conversations/README.md new file mode 100644 index 000000000000..858336b2b5ea --- /dev/null +++ b/python/samples/demos/guided_conversations/README.md @@ -0,0 +1,61 @@ +# Guided Conversations +This sample highlights a framework for a pattern of use cases we refer to as guided conversations. +These are scenarios where an agent with a goal and constraints leads a conversation. There are many of these scenarios where we hold conversations that are driven by an objective and constraints. 
For example:
+- a teacher guiding a student through a lesson
+- a call center representative collecting information about a customer's issue
+- a sales representative helping a customer find the right product for their specific needs
+- an interviewer asking a candidate a series of questions to assess their fit for a role
+- a nurse asking a series of questions to triage the severity of a patient's symptoms
+- a meeting where participants go around sharing their updates and discussing next steps
+
+The common thread between all these scenarios is that they are between a **creator** leading the conversation and a **user(s)** who are participating.
+The creator defines the goals, a plan for how the conversation should flow, and often collects key information through a form throughout the conversation.
+They must exercise judgment to navigate and adapt the conversation towards achieving the set goal all while writing down key information and planning in advance.
+
+The goal of this framework is to show how we can build a common framework to create AI agents that can assist a creator in running conversational scenarios semi-autonomously and generating **artifacts** like notes, forms, and plans that can be used to track progress and outcomes. A key tenet of this framework is the following principle: *think with the model, plan with the code*. This means that the model is used to understand user inputs and make complex decisions, but code is used to apply constraints and provide structure to make the system **reliable**. To better understand this concept, start with the [notebooks](./notebooks/).
+ + +## Features +We were motivated to create this sample while noticing some common challenges with using agents for conversation scenarios: +| Common Challenges | Guided Conversations | +| --------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Focus - Drift from their original goals | Define the agent's goal in terms of completing an ["artifact"](./guided_conversation/plugins/artifact.py), which is a precise representation of what the agent needs to do in the conversation | +| Pacing - Rushing through conversations, being overly verbose, and struggle to understand time | Encourage the agent to regularly update an [agenda](./guided_conversation/plugins/agenda.py) where each agenda item is allocated an estimated number of times, time limits are programmatically validated, and programmatically convert time-based units (e.g. seconds, minutes) to turns using [resource constraints](./guided_conversation/utils/resources.py) | +| Downstream Use Cases - Difficult to use chat logs for further processing or analysis | The [artifact](./guided_conversation/plugins/artifact.py) serves as (1) a structured record of the conversation that can be more easily analyzed afterward, (2) a way to monitor the agent's progress in real-time | + + +## Installation +This sample uses the same tooling as the [Semantic Kernel](https://github.com/microsoft/semantic-kernel/blob/main/python/pyproject.toml) Python source which uses [poetry](https://python-poetry.org/docs/) to install dependencies for development. + +1. `poetry install` +1. Activate `.venv` that was created by poetry +1. 
Set up the environment variables or a `.env` file for the LLM service you want to use.
+1. If you add new dependencies to the `pyproject.toml` file, run `poetry update`.
+
+
+### Quickstart
+1. Fork the repository.
+1. Install dependencies (see Installation) & set up environment variables
+1. Try the [01_guided_conversation_teaching.ipynb](./notebooks/01_guided_conversation_teaching.ipynb) as an example.
+1. For best quality and reliability, we recommend using the `gpt-4-1106-preview` or `gpt-4o` models since this sample requires complex reasoning and function calling abilities.
+
+
+## How You Can Use This Framework
+### Add a new scenario
+Create a new file and define the following inputs:
+- An artifact
+- Rules
+- Conversation flow (optional)
+- Context (optional)
+- Resource constraint (optional)
+
+See the [interactive script](./interactive_guided_conversation.py) for an example.
+
+### Editing Existing Plugins
+Edit plugins at [plugins](./guided_conversation/plugins/)
+
+### Editing the Orchestrator
+Go to [guided_conversation_agent.py](./guided_conversation/plugins/guided_conversation_agent.py).
+
+### Reusing Plugins
+We also encourage the open source community to pull in the artifact and agenda plugins to accelerate existing work. We believe that these plugins alone can improve goal-following in other agents.
diff --git a/python/samples/demos/guided_conversations/guided_conversation/__init__.py b/python/samples/demos/guided_conversations/guided_conversation/__init__.py
new file mode 100644
index 000000000000..2a50eae89411
--- /dev/null
+++ b/python/samples/demos/guided_conversations/guided_conversation/__init__.py
@@ -0,0 +1 @@
+# Copyright (c) Microsoft. All rights reserved.
diff --git a/python/samples/demos/guided_conversations/guided_conversation/functions/__init__.py b/python/samples/demos/guided_conversations/guided_conversation/functions/__init__.py new file mode 100644 index 000000000000..2a50eae89411 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/functions/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Microsoft. All rights reserved. diff --git a/python/samples/demos/guided_conversations/guided_conversation/functions/conversation_plan.py b/python/samples/demos/guided_conversations/guided_conversation/functions/conversation_plan.py new file mode 100644 index 000000000000..8f2259733d53 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/functions/conversation_plan.py @@ -0,0 +1,229 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging + +from semantic_kernel import Kernel +from semantic_kernel.functions import FunctionResult, KernelArguments + +from guided_conversation.plugins.agenda import Agenda +from guided_conversation.plugins.artifact import Artifact +from guided_conversation.utils.conversation_helpers import Conversation +from guided_conversation.utils.resources import GCResource, ResourceConstraintMode + +logger = logging.getLogger(__name__) + +conversation_plan_template = """You are a helpful, thoughtful, and meticulous assistant. +You are conducting a conversation with a user. \ +Your goal is to complete an artifact as thoroughly as possible by the end of the conversation, and to ensure a smooth experience for the user. 
+ +This is the schema of the artifact you are completing: +{{ artifact_schema }}{{#if context}} + +Here is some additional context about the conversation: +{{ context }}{{/if}} + +Throughout the conversation, you must abide by these rules: +{{ rules }}{{#if current_state_description }} + +Here's a description of the conversation flow: +{{ current_state_description }} +Follow this description, and exercise good judgment about when it is appropriate to deviate.{{/if}} + +You will be provided the history of your conversation with the user up until now and the current state of the artifact. +Note that if the value for a field in the artifact is 'Unanswered', it means that the field has not been completed. +You need to select the best possible action(s), given the state of the conversation and the artifact. +These are the possible actions you can take: +{{#if show_agenda}}Update agenda (required parameters: items) +- If the latest agenda is set to "None", you should always pick this action. +- You should pick this action if you need to change your plan for the conversation to make the best use of the remaining turns available to you. \ +Consider how long it usually takes to get the information you need (which is a function of the quality and pace of the user's responses), \ +the number, complexity, and importance of the remaining fields in the artifact, and the number of turns remaining ({{ remaining_resource }}). \ +Based on these factors, you might need to accelerate (e.g. combine several topics) or slow down the conversation (e.g. spread out a topic), in which case you should update the agenda accordingly. \ +Note that skipping an artifact field is NOT a valid way to accelerate the conversation. +- You must provide an ordered list of items to be completed sequentially, where the first item contains everything you will do in the current turn of the conversation (in addition to updating the agenda). 
\
+For example, if you choose to send a message to the user asking for their name and medical history, then you would write "ask for name and medical history" as the first item. \
+If you think medical history will take longer than asking for the name, then you would write "complete medical history" as the second item, with an estimate of how many turns you think it will take. \
+Do NOT include items that have already been completed. \
+Items must always represent a conversation topic (corresponding to the "Send message to user" action). Updating the artifact (e.g. "update field X based on the discussion") or terminating the conversation is NOT a valid item.
+- The latest agenda was created in the previous turn of the conversation. \
+Even if the total turns in the latest agenda equals the remaining turns, you should still update the agenda if you think the current plan is suboptimal (e.g. the first item was completed, the order of items is not ideal, an item is too broad or not a conversation topic, etc.).
+- Each item must have a description and your best guess for the number of turns required to complete it. Do not provide a range of turns. \
+It is EXTREMELY important that the total turns allocated across all items in the updated agenda (including the first item for the current turn) {{ total_resource_str }} \
+Everything in the agenda should be something you expect to complete in the remaining turns - there shouldn't be any optional "buffer" items. \
+It can be helpful to include the cumulative turns allocated for each item in the agenda to ensure you adhere to this rule, e.g. item 1 = 2 turns (cumulative total = 2), item 2 = 4 turns (cumulative total = 6), etc.
+- Avoid high-level items like "ask follow-up questions" - be specific about what you need to do.
+- Do NOT include wrap-up items such as "review and confirm all information with the user" (you should be doing this throughout the conversation) or "thank the user for their time". 
\ +Do NOT repeat topics that have already been sufficiently addressed. {{ ample_time_str }}{{/if}} + +Send message to user (required parameters: message) +- If there is no conversation history, you should always pick this action. +- You should pick this action if (a) the user asked a question or made a statement that you need to respond to, \ +or (b) you need to follow-up with the user because the information they provided is incomplete, invalid, ambiguous, or in some way insufficient to complete the artifact. \ +For example, if the artifact schema indicates that the "date of birth" field must be in the format "YYYY-MM-DD", but the user has only provided the month and year, you should send a message to the user asking for the day. \ +Likewise, if the user claims that their date of birth is February 30, you should send a message to the user asking for a valid date. \ +If the artifact schema is open-ended (e.g. it asks you to rate how pressing the user's issue is, without specifying rules for doing so), use your best judgment to determine whether you have enough information or you need to continue probing the user. \ +It's important to be thorough, but also to avoid asking the user for unnecessary information. + +Update artifact fields (required parameters: field, value) +- You should pick this action as soon as (a) the user provides new information that is not already reflected in the current state of the artifact and (b) you are able to submit a valid value for a field in the artifact using this new information. \ +If you have already updated a field in the artifact and there is no new information to update the field with, you should not pick this action. +- Make sure the value adheres to the constraints of the field as specified in the artifact schema. +- If the user has provided all required information to complete a field (i.e. 
the criteria for "Send message to user" are not satisfied) but the information is in the wrong format, you should not ask the user to reformat their response. \ +Instead, you should simply update the field with the correctly formatted value. For example, if the artifact asks for the date of birth in the format "YYYY-MM-DD", and the user provides their date of birth as "June 15, 2000", you should update the field with the value "2000-06-15". +- Prioritize accuracy over completion. You should never make up information or make assumptions in order to complete a field. \ +For example, if the field asks for a 10-digit phone number, and the user provided a 9-digit phone number, you should not add a digit to the phone number in order to complete the field. \ +Instead, you should follow-up with the user to ask for the correct phone number. If they still aren't able to provide one, you should leave the field unanswered. +- If the user isn't able to provide all of the information needed to complete a field, \ +use your best judgment to determine if a partial answer is appropriate (assuming it adheres to the formatting requirements of the field). \ +For example, if the field asks for a description of symptoms along with details about when the symptoms started, but the user isn't sure when their symptoms started, \ +it's better to record the information they do have rather than to leave the field unanswered (and to indicate that the user was unsure about the start date). +- If it's possible to update multiple fields at once (assuming you're adhering to the above rules in all cases), you should do so. \ +For example, if the user provides their full name and date of birth in the same message, you should select the "update artifact fields" action twice, once for each field. 
+
+End conversation (required parameters: None)
+{{ termination_instructions }}
+{{ resource_instructions }}
+
+If you select the "Update artifact field" action or the "Update agenda" action, you should also select one of the "Send message to user" or "End conversation" actions. \
+Note that artifact and agenda updates will always be executed before a message is sent to the user or the conversation is terminated. \
+Also note that only one message can be sent to the user at a time.
+
+Your task is to state your step-by-step reasoning for the best possible action(s), followed by a final recommendation of which action(s) to take, including all required parameters.
+Someone else will be responsible for executing the action(s) you select and they will only have access to your output \
+(not any of the conversation history, artifact schema, or other context) so it is EXTREMELY important \
+that you clearly specify the value of all required parameters for each action you select.
+
+Conversation history:
+{{ chat_history }}
+
+Latest agenda:
+{{ agenda_state }}
+
+Current state of the artifact:
+{{ artifact_state }}"""
+
+
+async def conversation_plan_function(
+    kernel: Kernel,
+    chat_history: Conversation,
+    context: str,
+    rules: list[str],
+    conversation_flow: str,
+    current_artifact: Artifact,
+    req_settings: dict,
+    resource: GCResource,
+    agenda: Agenda,
+) -> FunctionResult:
+    """Reasons/plans about the next best action(s) to continue the conversation. In this function, a DESCRIPTION of the possible actions
+    are surfaced to the agent. Note that the agent will not execute the actions, but will provide a step-by-step reasoning for the best
+    possible action(s). The implication here is that NO tool/plugin calls are made, only a description of what tool calls might be called
+    is created.
+
+    Currently, the reasoning/plan from this function is passed to another function (which leverages openai tool calling) that will execute
+    the actions. 
+ + Args: + kernel (Kernel): The kernel object. + chat_history (Conversation): The conversation history + context (str): Creator provided context of the conversation + rules (list[str]): Creator provided rules + conversation_flow (str): Creator provided conversation flow + current_artifact (Artifact): The current artifact + req_settings (dict): The request settings + resource (GCResource): The resource object + + Returns: + FunctionResult: The function result. + """ + # clear any pre-existing tools from the request settings + req_settings.tools = None + req_settings.tool_choice = None + + # clear any extension data + if hasattr(req_settings, "extension_data"): + req_settings.extension_data = {} + + kernel_function = kernel.add_function( + prompt=conversation_plan_template, + function_name="conversation_plan_function", + plugin_name="conversation_plan", + template_format="handlebars", + prompt_execution_settings=req_settings, + ) + + remaining_resource = resource.remaining_units + resource_instructions = resource.get_resource_instructions() + + # if there is a resource constraint and there's more than one turn left, include the update agenda action + if (resource_instructions != "") and (remaining_resource > 1): + if resource.get_resource_mode() == ResourceConstraintMode.MAXIMUM: + total_resource_str = f"does not exceed the remaining turns ({remaining_resource})." + ample_time_str = "" + elif resource.get_resource_mode() == ResourceConstraintMode.EXACT: + total_resource_str = ( + f"is equal to the remaining turns ({remaining_resource}). Do not leave any turns unallocated." + ) + ample_time_str = """If you have many turns remaining, instead of including wrap-up items or repeating topics, you should include items that increase the breadth and/or depth of the conversation \ +in a way that's directly relevant to the artifact (e.g. 
"collect additional details about X", "ask for clarification about Y", "explore related topic Z", etc.).""" + else: + logger.error("Invalid resource mode.") + else: + total_resource_str = "" + ample_time_str = "" + termination_instructions = _get_termination_instructions(resource) + + # only include the agenda if there is a resource constraint and there's more than one turn left + show_agenda = resource_instructions != "" and remaining_resource > 1 + + arguments = KernelArguments( + context=context, + artifact_schema=current_artifact.get_schema_for_prompt(), + rules=" ".join([r.strip() for r in rules]), + current_state_description=conversation_flow, + show_agenda=show_agenda, + remaining_resource=remaining_resource, + total_resource_str=total_resource_str, + ample_time_str=ample_time_str, + termination_instructions=termination_instructions, + resource_instructions=resource_instructions, + chat_history=chat_history.get_repr_for_prompt(), + agenda_state=agenda.get_agenda_for_prompt(), + artifact_state=current_artifact.get_artifact_for_prompt(), + ) + + result = await kernel.invoke(function=kernel_function, arguments=arguments) + return result + + +def _get_termination_instructions(resource: GCResource): + """ + Get the termination instructions for the conversation. This is contingent on the resources mode, + if any, that is available. + + Assumes we're always using turns as the resource unit. + + Args: + resource (GCResource): The resource object. + + Returns: + str: the termination instructions + """ + # Termination condition under no resource constraints + if resource.resource_constraint is None: + return "- You should pick this action as soon as you have completed the artifact to the best of your ability, \ +the conversation has come to a natural conclusion, or the user is not cooperating so you cannot continue the conversation." 
+ + # Termination condition under exact resource constraints + if resource.resource_constraint.mode == ResourceConstraintMode.EXACT: + return ( + "- You should only pick this action if the user is not cooperating so you cannot continue the conversation." + ) + + # Termination condition under maximum resource constraints + elif resource.resource_constraint.mode == ResourceConstraintMode.MAXIMUM: + return "- You should pick this action as soon as you have completed the artifact to the best of your ability, \ +the conversation has come to a natural conclusion, or the user is not cooperating so you cannot continue the conversation." + + else: + logger.error("Invalid resource mode provided.") + return "" diff --git a/python/samples/demos/guided_conversations/guided_conversation/functions/execution.py b/python/samples/demos/guided_conversations/guided_conversation/functions/execution.py new file mode 100644 index 000000000000..4acc74fb5bf3 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/functions/execution.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Annotated + +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.functions import FunctionResult, KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +execution_template = """You are a helpful, thoughtful, and meticulous assistant. +You are conducting a conversation with a user. Your goal is to complete an artifact as thoroughly as possible by the end of the conversation. +You will be given some reasoning about the best possible action(s) to take next given the state of the conversation as well as the artifact schema. 
+The reasoning is supposed to state the recommended action(s) to take next, along with all required parameters for each action.
+Your task is to execute ALL actions recommended in the reasoning in the order they are listed.
+If the reasoning's specification of an action is incomplete (e.g. it doesn't include all required parameters for the action, \
+or some parameters are specified implicitly, such as "send a message that contains a greeting" instead of explicitly providing \
+the value of the "message" parameter), do not execute the action. You should never fill in missing or imprecise parameters yourself.
+If the reasoning is not clear about which actions to take, or all actions are specified in an incomplete way, \
+return 'None' without selecting any action.
+
+Artifact schema:
+{{ artifact_schema }}
+
+If the type in the schema is str, the "field_value" parameter in the action should also be a string.
+These are example parameters for the update_artifact action: {"field_name": "company_name", "field_value": "Contoso"}
+DO NOT write JSON in the "field_value" parameter in this case. {"field_name": "company_name", "field_value": "{"value": "Contoso"}"} is INCORRECT.
+
+Reasoning:
+{{ reasoning }}"""
+
+
+@kernel_function(name="send_message_to_user", description="Sends a message to the user.")
+def send_message(message: Annotated[str, "The message to send to the user."]) -> None:
+    return None
+
+
+@kernel_function(name="end_conversation", description="Ends the conversation.")
+def end_conversation() -> None:
+    return None
+
+
+async def execution(
+    kernel: Kernel, reasoning: str, filter: list[str], req_settings: PromptExecutionSettings, artifact_schema: str
+) -> FunctionResult:
+    """Executes the actions recommended by the reasoning/planning call in the given context.
+
+    Args:
+        kernel (Kernel): The kernel object.
+        reasoning (str): The reasoning from a previous model call.
+        filter (list[str]): The list of plugins to INCLUDE for the tool call. 
+ req_settings (PromptExecutionSettings): The prompt execution settings. + artifact (str): The artifact schema for the execution prompt. + + Returns: + FunctionResult: The result of the execution. + """ + filter = {"included_plugins": filter} + req_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(auto_invoke=False, filters=filter) + + kernel_function = kernel.add_function( + prompt=execution_template, + function_name="execution", + plugin_name="execution", + template_format="handlebars", + prompt_execution_settings=req_settings, + ) + + arguments = KernelArguments( + artifact_schema=artifact_schema, + reasoning=reasoning, + ) + + result = await kernel.invoke(function=kernel_function, arguments=arguments) + return result diff --git a/python/samples/demos/guided_conversations/guided_conversation/functions/final_update_plan.py b/python/samples/demos/guided_conversations/guided_conversation/functions/final_update_plan.py new file mode 100644 index 000000000000..e02f6483af2a --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/functions/final_update_plan.py @@ -0,0 +1,99 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel import Kernel +from semantic_kernel.functions import FunctionResult, KernelArguments + +from guided_conversation.utils.conversation_helpers import Conversation + +final_update_template = """You are a helpful, thoughtful, and meticulous assistant. +You just finished a conversation with a user.{{#if context}} Here is some additional context about the conversation: +{{ context }}{{/if}} + +Your goal is to complete an artifact as thoroughly and accurately as possible based on the conversation. + +This is the schema of the artifact: +{{ artifact_schema }} + +You will be given the current state of the artifact as well as the conversation history. +Note that if the value for a field in the artifact is 'Unanswered', it means that the field was not completed. 
\
+Some fields may have already been completed during the conversation.
+
+You need to determine whether there are any fields that need to be updated, and if so, update them.
+- You should only update a field if both of the following conditions are met: (a) the current state does NOT adequately reflect the conversation \
+and (b) you are able to submit a valid value for a field. \
+You are allowed to update completed fields, but you should only do so if the current state is inadequate, \
+e.g. the user corrected a mistake in their date of birth, but the artifact does not show the corrected version. \
+Remember that it's always an option to reset a field to "Unanswered" - this is often the best choice if the artifact contains incorrect information that cannot be corrected. \
+Do not submit a value that is identical to the current state of the field (e.g. if the field is already "Unanswered" and the user didn't provide any new information about it, you should not submit "Unanswered"). \
+- Make sure the value adheres to the constraints of the field as specified in the artifact schema. \
+If it's not possible to update a field with a valid value (e.g., the user provided an invalid date of birth), you should not update the field.
+- If the artifact schema is open-ended (e.g. it asks you to rate how pressing the user's issue is, without specifying rules for doing so), \
+use your best judgment to determine whether you have enough information to complete the field based on the conversation.
+- Prioritize accuracy over completion. You should never make up information or make assumptions in order to complete a field. \
+For example, if the field asks for a 10-digit phone number, and the user provided a 9-digit phone number, you should not add a digit to the phone number in order to complete the field. 
+- If the user wasn't able to provide all of the information needed to complete a field, \ +use your best judgment to determine if a partial answer is appropriate (assuming it adheres to the formatting requirements of the field). \ +For example, if the field asks for a description of symptoms along with details about when the symptoms started, but the user wasn't sure when their symptoms started, \ +it's better to record the information they do have rather than to leave the field unanswered (and to indicate that the user was unsure about the start date). +- It's possible to update multiple fields at once (assuming you're adhering to the above rules in all cases). It's also possible that no fields need to be updated. + +Your task is to state your step-by-step reasoning about what to update, followed by a final recommendation. +Someone else will be responsible for executing the updates and they will only have access to your output \ +(not any of the conversation history, artifact schema, or other context) so make sure to specify exactly which \ +fields to update and the values to update them with, or to state that no fields need to be updated. + + +Conversation history: +{{ conversation_history }} + +Current state of the artifact: +{{ artifact_state }}""" + + +async def final_update_plan_function( + kernel: Kernel, + req_settings: dict, + chat_history: Conversation, + context: str, + artifact_schema: str, + artifact_state: str, +) -> FunctionResult: + """This function is responsible for updating the artifact based on the conversation history when the conversation ends. This function may not always update the artifact, namely if the current state of the artifact is already accurate based on the conversation history. The function will return a step-by-step reasoning about what to update, followed by a final recommendation. 
The final recommendation will specify exactly which fields to update and the values to update them with, or to state that no fields need to be updated. + + + Args: + kernel (Kernel): The kernel object. + req_settings (dict): The prompt execution settings. + chat_history (Conversation): The conversation history. + context (str): The context of the conversation. + artifact_schema (str): The schema of the artifact. + artifact_state (str): The current state of the artifact. + + Returns: + FunctionResult: The result of the function (step-by-step reasoning about what to update in the artifact) + """ + req_settings.tools = None + req_settings.tool_choice = None + + # clear any extension data + if hasattr(req_settings, "extension_data"): + req_settings.extension_data = {} + + kernel_function = kernel.add_function( + prompt=final_update_template, + function_name="final_update_plan_function", + plugin_name="final_update_plan", + template_format="handlebars", + prompt_execution_settings=req_settings, + ) + + arguments = KernelArguments( + conversation_history=chat_history.get_repr_for_prompt(), + context=context, + artifact_schema=artifact_schema, + artifact_state=artifact_state, + ) + + result = await kernel.invoke(function=kernel_function, arguments=arguments) + + return result diff --git a/python/samples/demos/guided_conversations/guided_conversation/plugins/__init__.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/__init__.py new file mode 100644 index 000000000000..2a50eae89411 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Microsoft. All rights reserved. 
diff --git a/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py new file mode 100644 index 000000000000..a74b897dcb9d --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/agenda.py @@ -0,0 +1,253 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from typing import Annotated + +from pydantic import Field, ValidationError +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.functions import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +from guided_conversation.utils.base_model_llm import BaseModelLLM +from guided_conversation.utils.conversation_helpers import Conversation, ConversationMessageType +from guided_conversation.utils.openai_tool_calling import ToolValidationResult +from guided_conversation.utils.plugin_helpers import PluginOutput, fix_error, update_attempts +from guided_conversation.utils.resources import ResourceConstraintMode, ResourceConstraintUnit, format_resource + +AGENDA_ERROR_CORRECTION_SYSTEM_TEMPLATE = """You are a helpful, thoughtful, and meticulous assistant. +You are conducting a conversation with a user. You tried to update the agenda, but the update was invalid. +You will be provided the history of your conversation with the user, \ +your previous attempt(s) at updating the agenda, and the error message(s) that resulted from your attempt(s). +Your task is to correct the update so that it is valid. \ +Your changes should be as minimal as possible - you are focused on fixing the error(s) that caused the update to be invalid. +Note that if the resource allocation is invalid, you must follow these rules: +1. 
You should not change the description of the first item (since it has already been executed), but you can change its resource allocation +2. For all other items, you can combine or split them, or assign them fewer or more resources, \ +but the content they cover collectively should not change (i.e. don't eliminate or add new topics). +For example, the invalid attempt was "item 1 = ask for date of birth (1 turn), item 2 = ask for phone number (1 turn), \ +item 3 = ask for phone type (1 turn), item 4 = explore treatment history (6 turns)", \ +and the error says you need to correct the total resource allocation to 7 turns. \ +A bad solution is "item 1 = ask for date of birth (1 turn), \ +item 2 = explore treatment history (6 turns)" because it eliminates the phone number and phone type topics. \ +A good solution is "item 1 = ask for date of birth (2 turns), item 2 = ask for phone number, phone type, +and treatment history (2 turns), item 3 = explore treatment history (3 turns)." + +Conversation history: +{{ conversation_history }} + +Previous attempts to update the agenda: +{{ previous_attempts }}""" + +UPDATE_AGENDA_TOOL = "update_agenda" + + +class _BaseAgendaItem(BaseModelLLM): + title: str = Field(description="Brief description of the item") + resource: int = Field(description="Number of turns required for the item") + + +class _BaseAgenda(BaseModelLLM): + items: list[_BaseAgendaItem] = Field( + description="Ordered list of items to be completed in the remainder of the conversation", + default_factory=list, + ) + + +class Agenda: + """An abstraction to manage a conversation agenda. The expected use case is that another agent will generate an agenda. + This class will validate if it is valid, and help correct it if it is not. + + Args: + kernel (Kernel): The Semantic Kernel instance to use for calling the LLM. Don't forget to set your + req_settings since this class uses tool calling functionality from the Semantic Kernel. 
+ service_id (str): The service ID to use for the Semantic Kernel tool calling. One kernel can have multiple + services. The service ID is used to identify which service to use for LLM calls. The Agenda object + assumes that the service has tool calling capabilities and is some flavor of chat completion. + resource_constraint_mode (ResourceConstraintMode): The mode for resource constraints. + max_agenda_retries (int): The maximum number of retries for updating the agenda. + """ + + def __init__( + self, + kernel: Kernel, + service_id: str, + resource_constraint_mode: ResourceConstraintMode | None, + max_agenda_retries: int = 2, + ) -> None: + logger = logging.getLogger(__name__) + + self.id = "agenda_plugin" + self.kernel = Kernel() + self.logger = logger + self.kernel = kernel + self.service_id = service_id + + self.resource_constraint_mode = resource_constraint_mode + self.max_agenda_retries = max_agenda_retries + + self.agenda = _BaseAgenda() + + async def update_agenda( + self, + items: list[dict[str, str]], + remaining_turns: int, + conversation: Conversation, + ) -> PluginOutput: + """Updates the agenda model with the given items (generally generated by an LLM) and validates if the update is valid. + The agenda update reasons in terms of turns for validating if the proposed agenda is valid. + If you wish to use a different resource unit, convert the value to turns in some way because + we found that LLMs do much better at reasoning in terms of turns. + + Args: + items (list[dict[str, str]]): A list of agenda items. + Each item should have the following keys: + - title (str): A brief description of the item. + - resource (int): The number of turns required for the item. + remaining_turns (int): The number of remaining turns. + conversation (Conversation): The conversation object. + + Returns: + PluginOutput: A PluginOutput object with the success status. Does not generate any messages. 
+ """ + previous_attempts = [] + while True: + try: + # Try to update the agenda, and do extra validation checks + self.agenda.items = items + self._validate_agenda_update(items, remaining_turns) + self.logger.info(f"Agenda updated successfully: {self.get_agenda_for_prompt()}") + return PluginOutput(True, []) + except (ValidationError, ValueError) as e: + # Update the previous attempts and get instructions for the LLM + previous_attempts, llm_formatted_attempts = update_attempts( + error=e, attempt_id=str(items), previous_attempts=previous_attempts + ) + + # If we have reached the maximum number of retries return a failure + if len(previous_attempts) > self.max_agenda_retries: + self.logger.warning(f"Failed to update agenda after {self.max_agenda_retries} attempts.") + return PluginOutput(False, []) + else: + self.logger.info(f"Attempting to fix the agenda error. Attempt {len(previous_attempts)}.") + response = await self._fix_agenda_error(llm_formatted_attempts, conversation) + if response["validation_result"] != ToolValidationResult.SUCCESS: + self.logger.warning( + f"Failed to fix the agenda error due to a failure in the LLM tool call: {response['validation_result']}" + ) + return PluginOutput(False, []) + else: + # Use the result of the first tool call to try the update again + items = response["tool_args_list"][0]["items"] + + def get_agenda_for_prompt(self) -> str: + """Gets a string representation of the agenda for use in an LLM prompt. + + Returns: + str: A string representation of the agenda. + """ + agenda_json = self.agenda.model_dump() + agenda_items = agenda_json.get("items", []) + if len(agenda_items) == 0: + return "None" + agenda_str = "\n".join( + [ + f"{i+1}. 
[{format_resource(item['resource'], ResourceConstraintUnit.TURNS)}] {item['title']}" + for i, item in enumerate(agenda_items) + ] + ) + total_resource = format_resource(sum([item["resource"] for item in agenda_items]), ResourceConstraintUnit.TURNS) + agenda_str += f"\nTotal = {total_resource}" + return agenda_str + + # The following is the kernel function that will be provided to the LLM call + class Items: + title: Annotated[str, "Description of the item"] + resource: Annotated[int, "Number of turns required for the item"] + + @kernel_function( + name=UPDATE_AGENDA_TOOL, + description="Updates the agenda.", + ) + def update_agenda_items( + self, + items: Annotated[list[Items], "Ordered list of items to be completed in the remainder of the conversation"], + ): + pass + + async def _fix_agenda_error(self, previous_attempts: str, conversation: Conversation) -> None: + """Calls an LLM to try and fix an error in the agenda update.""" + req_settings = self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) + req_settings.max_tokens = 2000 + + self.kernel.add_function(plugin_name=self.id, function=self.update_agenda_items) + filter = {"included_plugins": [self.id]} + req_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(auto_invoke=False, filters=filter) + + arguments = KernelArguments( + conversation_history=conversation.get_repr_for_prompt(exclude_types=[ConversationMessageType.REASONING]), + previous_attempts=previous_attempts, + ) + + return await fix_error( + kernel=self.kernel, + prompt_template=AGENDA_ERROR_CORRECTION_SYSTEM_TEMPLATE, + req_settings=req_settings, + arguments=arguments, + ) + + def _validate_agenda_update(self, items: list[dict[str, str]], remaining_turns: int) -> None: + """Validates if any constraints were violated while performing the agenda update. + + Args: + items (list[dict[str, str]]): A list of agenda items. + remaining_turns (int): The number of remaining turns. 
+ + Raises: + ValueError: If any validation checks fail. + """ + # The total, proposed allocation of resources. + total_resources = sum([item["resource"] for item in items]) + + violations = [] + # In maximum mode, the total resources should not exceed the remaining turns + if (self.resource_constraint_mode == ResourceConstraintMode.MAXIMUM) and (total_resources > remaining_turns): + total_resource_instruction = ( + f"The total turns allocated in the agenda must not exceed the remaining amount ({remaining_turns})" + ) + violations.append(f"{total_resource_instruction}; but the current total is {total_resources}.") + + # In exact mode if the total resources were not exactly equal to the remaining turns + if (self.resource_constraint_mode == ResourceConstraintMode.EXACT) and (total_resources != remaining_turns): + total_resource_instruction = ( + f"The total turns allocated in the agenda must equal the remaining amount ({remaining_turns})" + ) + violations.append(f"{total_resource_instruction}; but the current total is {total_resources}.") + + # Check if any item has a resource value of 0 + if any(item["resource"] <= 0 for item in items): + violations.append("All items must have a resource value greater than 0.") + + # Raise an error if any violations were found + if len(violations) > 0: + self.logger.debug(f"Agenda update failed due to the following violations: {violations}.") + raise ValueError(" ".join(violations)) + + def to_json(self) -> dict: + agenda_dict = self.agenda.model_dump() + return { + "agenda": agenda_dict, + } + + @classmethod + def from_json( + cls, + json_data: dict, + kernel: Kernel, + service_id: str, + resource_constraint_mode: ResourceConstraintMode | None, + max_agenda_retries: int = 2, + ) -> "Agenda": + agenda = cls(kernel, service_id, resource_constraint_mode, max_agenda_retries) + agenda.agenda.items = json_data["agenda"]["items"] + return agenda diff --git 
a/python/samples/demos/guided_conversations/guided_conversation/plugins/artifact.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/artifact.py new file mode 100644 index 000000000000..5e5a6ce7289f --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/artifact.py @@ -0,0 +1,480 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from typing import Annotated, Any, Literal, get_args, get_origin, get_type_hints + +from pydantic import BaseModel, create_model +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.functions import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +from guided_conversation.utils.base_model_llm import BaseModelLLM +from guided_conversation.utils.conversation_helpers import Conversation, ConversationMessageType +from guided_conversation.utils.openai_tool_calling import ToolValidationResult +from guided_conversation.utils.plugin_helpers import PluginOutput, fix_error, update_attempts +from semantic_kernel.contents import ChatMessageContent, AuthorRole + +ARTIFACT_ERROR_CORRECTION_SYSTEM_TEMPLATE = """You are a helpful, thoughtful, and meticulous assistant. +You are conducting a conversation with a user. Your goal is to complete an artifact as thoroughly as possible by the end of the conversation. +You have tried to update a field in the artifact, but the value you provided did not adhere \ +to the constraints of the field as specified in the artifact schema. +You will be provided the history of your conversation with the user, the schema for the field, \ +your previous attempt(s) at updating the field, and the error message(s) that resulted from your attempt(s). +Your task is to select the best possible action to take next: +1. 
Update artifact +- You should pick this action if you have a valid value to submit for the field in question. +2. Resume conversation +- You should pick this action if: (a) you do NOT have a valid value to submit for the field in question, and \ +(b) you need to ask the user for more information in order to obtain a valid value. \ +For example, if the user stated that their date of birth is June 2000, but the artifact field asks for the date of birth in the format \ +"YYYY-MM-DD", you should resume the conversation and ask the user for the day. + +Conversation history: +{{ conversation_history }} + +Schema: +{{ artifact_schema }} + +Previous attempts to update the field "{{ field_name }}" in the artifact: +{{ previous_attempts }}""" + +UPDATE_ARTIFACT_TOOL = "update_artifact_field" +RESUME_CONV_TOOL = "resume_conversation" + + +class Artifact: + """The Artifact plugin takes in a Pydantic base model, and robustly handles updating the fields of the model + A typical use case is as a form an agent must complete throughout a conversation. + Another use case is as a working memory for the agent. + + The primary interface is update_artifact, which takes in the field_name to update and its new value. + Additionally, the chat_history is passed in to help the agent make informed decisions in case an error occurs. + + The Artifact also exposes several functions to access internal state: + get_artifact_for_prompt, get_schema_for_prompt, and get_failed_fields. + """ + + def __init__( + self, kernel: Kernel, service_id: str, input_artifact: BaseModel, max_artifact_field_retries: int = 2 + ) -> None: + """ + Initialize the Artifact plugin with the given Pydantic base model. + + Args: + kernel (Kernel): The Semantic Kernel instance to use for calling the LLM. Don't forget to set your + req_settings since this class uses tool calling functionality from the Semantic Kernel. + service_id (str): The service ID to use for the Semantic Kernel tool calling. 
One kernel can have multiple + services. The service ID is used to identify which service to use for LLM calls. The Artifact object + assumes that the service has tool calling capabilities and is some flavor of chat completion. + input_artifact (BaseModel): The Pydantic base model to use as the artifact + max_artifact_field_retries (int): The maximum number of times to retry updating a field in the artifact + """ + logger = logging.getLogger(__name__) + self.logger = logger + + self.id = "artifact_plugin" + self.kernel = kernel + self.service_id = service_id + self.max_artifact_field_retries = max_artifact_field_retries + + self.original_schema = input_artifact.model_json_schema() + self.artifact = self._initialize_artifact(input_artifact) + + # failed_artifact_fields maps a field name to a list of the history of the failed attempts to update it + # dict: key = field, value = list of tuple[attempt, error message] + self.failed_artifact_fields: dict[str, list[tuple[str, str]]] = {} + + # The following are the kernel functions that will be provided to the LLM call + @kernel_function( + name=UPDATE_ARTIFACT_TOOL, + description="Sets the value of a field in the artifact", + ) + def update_artifact_field( + self, + field: Annotated[str, "The name of the field to update in the artifact"], + value: Annotated[str, "The value to set the field to"], + ) -> None: + pass + + @kernel_function( + name=RESUME_CONV_TOOL, + description="Resumes conversation to get more information from the user ", + ) + def resume_conversation(self): + pass + + async def update_artifact(self, field_name: str, field_value: Any, conversation: Conversation) -> PluginOutput: + """The core interface for the Artifact plugin. + This function will attempt to update the given field_name to the given field_value. + If the field_value fails Pydantic validation, an LLM will determine one of two actions to take. 
+ Given the conversation as additional context the two actions are: + - Retry the update the artifact by fixing the formatting using the previous failed attempts as guidance + - Take no action or in other words, resume the conversation to ask the user for more information because the user gave incomplete or incorrect information + + Args: + field_name (str): The name of the field to update in the artifact + field_value (Any): The value to set the field to + conversation (Conversation): The conversation object that contains the history of the conversation + + Returns: + PluginOutput: An object with two fields: a boolean indicating success + and a list of conversation messages that may have been generated. + + Several outcomes can happen: + - The update may have failed due to + - A field_name that is not valid in the artifact. + - The field_value failing Pydantic validation and all retries failed. + - The model failed to correctly call a tool. + In this case, the boolean will be False and the list may contain a message indicating the failure. + + - The agent may have successfully updated the artifact or fixed it. + In this case, the boolean will be True and the list will contain a message indicating the update and possibly intermediate messages. + + - The agent may have decided to resume the conversation. + In this case, the boolean will be True and the messages may only contain messages indicated previous errors. 
+ """ + + conversation_messages: list[ChatMessageContent] = [] + + # Check if the field name is valid, and return with a failure message if not + is_valid_field, msg = self._is_valid_field(field_name) + if not is_valid_field: + conversation_messages.append(msg) + return PluginOutput(update_successful=False, messages=conversation_messages) + + # Try to update the field, and handle any errors that occur until the field is + # successfully updated or skipped according to max_artifact_field_retries + while True: + try: + # Check if there have been too many previous failed attempts to update the field + if len(self.failed_artifact_fields.get(field_name, [])) >= self.max_artifact_field_retries: + self.logger.warning(f"Updating field {field_name} has failed too many times. Skipping.") + return False, conversation_messages + + # Attempt to update the artifact + msg = self._execute_update_artifact(field_name, field_value) + conversation_messages.append(msg) + return PluginOutput(True, conversation_messages) + except Exception as e: + self.logger.warning(f"Error updating field {field_name}: {e}. Retrying...") + # Handle update error will increment failed_artifact_fields, once it has failed + # greater than self.max_artifact_field_retries the field will be skipped and the loop will break + success, new_field_value = await self._handle_update_error(field_name, field_value, conversation, e) + + # The agent has successfully fixed the field. + if success and new_field_value is not None: + self.logger.info(f"Agent successfully fixed field {field_name}. New value: {new_field_value}") + field_value = new_field_value + # This is the case where the agent has decided to resume the conversation. + elif success: + self.logger.info( + f"Agent could not fix the field itself & decided to resume conversation to fix field {field_name}" + ) + return PluginOutput(True, conversation_messages) + self.logger.warning(f"Agent failed to fix field {field_name}. 
Retrying...") + # Otherwise, the agent has failed and we will go through the loop again + + def get_artifact_for_prompt(self) -> str: + """Returns a formatted JSON-like representation of the current state of the fields artifact. + Any fields that were failed are completely omitted. + + Returns: + str: The string representation of the artifact. + """ + failed_fields = self.get_failed_fields() + return {k: v for k, v in self.artifact.model_dump().items() if k not in failed_fields} + + def get_schema_for_prompt(self, filter_one_field: str | None = None) -> str: + """Gets a clean version of the original artifact schema, optimized for use in an LLM prompt. + + Args: + filter_one_field (str | None): If this is provided, only the schema for this one field will be returned. + + Returns: + str: The cleaned schema + """ + + def _clean_properties(schema: dict, failed_fields: list[str]) -> str: + properties = schema.get("properties", {}) + clean_properties = {} + for name, property_dict in properties.items(): + if name not in failed_fields: + cleaned_property = {} + for k, v in property_dict.items(): + if k in ["title", "default"]: + continue + cleaned_property[k] = v + clean_properties[name] = cleaned_property + + clean_properties_str = str(clean_properties) + clean_properties_str = clean_properties_str.replace("$ref", "type") + clean_properties_str = clean_properties_str.replace("#/$defs/", "") + return clean_properties_str + + # If filter_one_field is provided, only get the schema for that one field + if filter_one_field: + if not self._is_valid_field(filter_one_field): + self.logger.error(f'Field "{filter_one_field}" is not a valid field in the artifact.') + raise ValueError(f'Field "{filter_one_field}" is not a valid field in the artifact.') + filtered_schema = {"properties": {filter_one_field: self.original_schema["properties"][filter_one_field]}} + filtered_schema.update((k, v) for k, v in self.original_schema.items() if k != "properties") + schema = filtered_schema + 
else: + schema = self.original_schema + + failed_fields = self.get_failed_fields() + properties = _clean_properties(schema, failed_fields) + if not properties: + self.logger.error("No properties found in the schema.") + raise ValueError("No properties found in the schema.") + + types_schema = schema.get("$defs", {}) + custom_types = [] + for type_name, type_info in types_schema.items(): + if f"'type': '{type_name}'" in properties: + clean_schema = _clean_properties(type_info, []) + if clean_schema != "{}": + custom_types.append(f"{type_name} = {clean_schema}") + + if custom_types: + explanation = f"If you wanted to create a {type_name} object, for example, you would make a JSON object \ +with the following keys: {', '.join(types_schema[type_name]['properties'].keys())}." + custom_types_str = "\n".join(custom_types) + return f"""{properties} + +Here are the definitions for the custom types referenced in the artifact schema: +{custom_types_str} + +{explanation} +Remember that when updating the artifact, the field will be the original field name in the artifact and the JSON object(s) will be the value.""" + else: + return properties + + def get_failed_fields(self) -> list[str]: + """Get a list of fields that have failed all attempts to update. + + Returns: + list[str]: A list of field names that have failed all attempts to update. + """ + fields = [] + for field, attempts in self.failed_artifact_fields.items(): + if len(attempts) >= self.max_artifact_field_retries: + fields.append(field) + return fields + + def _initialize_artifact(self, artifact_model: BaseModel) -> BaseModelLLM: + """Create a new artifact model based on the one provided by the user + with "Unanswered" set for all fields. 
+ + Args: + artifact_model (BaseModel): The Pydantic class provided by the user + + Returns: + BaseModelLLM: The new artifact model with "Unanswered" set for all fields + """ + modified_classes = self._modify_classes(artifact_model) + artifact = self._modify_base_artifact(artifact_model, modified_classes) + return artifact() + + def _get_type_if_subtype(self, target_type: type[Any], base_type: type[Any]) -> type[Any] | None: + """Recursively checks the target_type to see if it is a subclass of base_type or a generic including base_type. + + Args: + target_type: The type to check. + base_type: The type to check against. + + Returns: + The class type if target_type is base_type, a subclass of base_type, or a generic including base_type; otherwise, None. + """ + origin = get_origin(target_type) + if origin is None: + if issubclass(target_type, base_type): + return target_type + else: + # Recursively check if any of the arguments are the target type + for arg in get_args(target_type): + result = self._get_type_if_subtype(arg, base_type) + if result is not None: + return result + return None + + def _modify_classes(self, artifact_class: BaseModel) -> dict[str, type[BaseModelLLM]]: + """Find all classes used as type hints in the artifact, and modify them to set 'Unanswered' as a default and valid value for all fields.""" + modified_classes = {} + # Find any instances of BaseModel in the artifact class in the first "level" of type hints + for field_name, field_type in get_type_hints(artifact_class).items(): + is_base_model = self._get_type_if_subtype(field_type, BaseModel) + if is_base_model is not None: + modified_classes[field_name] = self._modify_base_artifact(is_base_model) + + return modified_classes + + def _replace_type_annotations( + self, field_annotation: type[Any] | None, modified_classes: dict[str, type[BaseModelLLM]] + ) -> type: + """Recursively replace type annotations with modified classes where applicable.""" + # Get the origin of the field annotation, 
which is the base type for generic types (e.g., List[str] -> list, Dict[str, int] -> dict) + origin = get_origin(field_annotation) + # Get the type arguments of the generic type (e.g., List[str] -> str, Dict[str, int] -> str, int) + args = get_args(field_annotation) + + if origin is None: + # The type is not generic; check if it's a subclass that needs to be replaced + if isinstance(field_annotation, type) and issubclass(field_annotation, BaseModelLLM): + return modified_classes.get(field_annotation.__name__, field_annotation) + return field_annotation + else: + # The type is generic; recursively replace the type annotations of the arguments + new_args = tuple(self._replace_type_annotations(arg, modified_classes) for arg in args) + return origin[new_args] + + def _modify_base_artifact( + self, artifact_model: type[BaseModelLLM], modified_classes: dict[str, type[BaseModelLLM]] | None = None + ) -> type[BaseModelLLM]: + """Create a new artifact model with 'Unanswered' as a default and valid value for all fields.""" + for _, field_info in artifact_model.model_fields.items(): + # Replace original classes with modified version + if modified_classes is not None: + field_info.annotation = self._replace_type_annotations(field_info.annotation, modified_classes) + # This makes it possible to always set a field to "Unanswered" + field_info.annotation = field_info.annotation | Literal["Unanswered"] + # This sets the default value to "Unanswered" + field_info.default = "Unanswered" + # This adds "Unanswered" as a possible value to any regex patterns + metadata = field_info.metadata + for m in metadata: + if hasattr(m, "pattern"): + m.pattern += "|Unanswered" + field_definitions = { + name: (field_info.annotation, field_info) for name, field_info in artifact_model.model_fields.items() + } + artifact_model = create_model("Artifact", __base__=BaseModelLLM, **field_definitions) + return artifact_model + + def _is_valid_field(self, field_name: str) -> tuple[bool, 
ChatMessageContent]: + """Check if the field_name is a valid field in the artifact. Returns True if it is, False and an error message otherwise.""" + if field_name not in self.artifact.model_fields: + error_message = f'Field "{field_name}" is not a valid field in the artifact.' + msg = ChatMessageContent( + role=AuthorRole.ASSISTANT, + content=error_message, + metadata={"type": ConversationMessageType.ARTIFACT_UPDATE, "turn_number": None}, + ) + return False, msg + return True, None + + async def _fix_artifact_error( + self, + field_name: str, + previous_attempts: str, + conversation_repr: str, + artifact_schema_repr: str, + ) -> dict[str, Any]: + """Calls the LLM to fix an error in the artifact using Semantic Kernel kernel.""" + + req_settings = self.kernel.get_prompt_execution_settings_from_service_id(self.service_id) + req_settings.max_tokens = 2000 + + self.kernel.add_function(plugin_name=self.id, function=self.update_artifact_field) + self.kernel.add_function(plugin_name=self.id, function=self.resume_conversation) + filter = {"included_plugins": [self.id]} + req_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(auto_invoke=False, filters=filter) + + arguments = KernelArguments( + field_name=field_name, + conversation_history=conversation_repr, + previous_attempts=previous_attempts, + artifact_schema=artifact_schema_repr, + settings=req_settings, + ) + + return await fix_error( + kernel=self.kernel, + prompt_template=ARTIFACT_ERROR_CORRECTION_SYSTEM_TEMPLATE, + req_settings=req_settings, + arguments=arguments, + ) + + def _execute_update_artifact( + self, + field_name: Annotated[str, "The name of the field to update in the artifact"], + field_value: Annotated[Any, "The value to set the field to"], + ) -> None: + """Update a field in the artifact with a new value. 
This will raise an error if the field_value is invalid.""" + setattr(self.artifact, field_name, field_value) + msg = ChatMessageContent( + role=AuthorRole.ASSISTANT, + content=f"Assistant updated {field_name} to {field_value}", + metadata={"type": ConversationMessageType.ARTIFACT_UPDATE, "turn_number": None}, + ) + return msg + + async def _handle_update_error( + self, field_name: str, field_value: Any, conversation: Conversation, error: Exception + ) -> tuple[bool, Any]: + """ + Handles the logic for when an error occurs while updating a field. + Creates the appropriate context for the model and calls the LLM to fix the error. + + Args: + field_name (str): The name of the field to update in the artifact + field_value (Any): The value to set the field to + conversation (Conversation): The conversation object that contains the history of the conversation + error (Exception): The error that occurred while updating the field + + Returns: + tuple[bool, Any]: A tuple containing a boolean indicating success and the new field value if successful (if not, then None) + """ + # Update the failed attempts for the field + previous_attempts = self.failed_artifact_fields.get(field_name, []) + previous_attempts, llm_formatted_attempts = update_attempts( + error=error, attempt_id=str(field_value), previous_attempts=previous_attempts + ) + self.failed_artifact_fields[field_name] = previous_attempts + + # Call the LLM to fix the error + conversation_history_repr = conversation.get_repr_for_prompt(exclude_types=[ConversationMessageType.REASONING]) + artifact_schema_repr = self.get_schema_for_prompt(filter_one_field=field_name) + result = await self._fix_artifact_error( + field_name, llm_formatted_attempts, conversation_history_repr, artifact_schema_repr + ) + + # Handling the result of the LLM call + if result["validation_result"] != ToolValidationResult.SUCCESS: + return False, None + # Only consider the first tool call + tool_name = result["tool_names"][0] + tool_args = 
result["tool_args_list"][0] + if tool_name == f"{self.id}-{UPDATE_ARTIFACT_TOOL}": + field_value = tool_args["value"] + return True, field_value + elif tool_name == f"{self.id}-{RESUME_CONV_TOOL}": + return True, None + + def to_json(self) -> dict: + artifact_fields = self.artifact.model_dump() + return { + "artifact": artifact_fields, + "failed_fields": self.failed_artifact_fields, + } + + @classmethod + def from_json( + cls, + json_data: dict, + kernel: Kernel, + service_id: str, + input_artifact: BaseModel, + max_artifact_field_retries: int = 2, + ) -> "Artifact": + artifact = cls(kernel, service_id, input_artifact, max_artifact_field_retries) + + artifact.failed_artifact_fields = json_data["failed_fields"] + + # Iterate over artifact fields and set them to the values in the json data + # Skip any fields that are set as "Unanswered" + for field_name, field_value in json_data["artifact"].items(): + if field_value != "Unanswered": + setattr(artifact.artifact, field_name, field_value) + return artifact diff --git a/python/samples/demos/guided_conversations/guided_conversation/plugins/guided_conversation_agent.py b/python/samples/demos/guided_conversations/guided_conversation/plugins/guided_conversation_agent.py new file mode 100644 index 000000000000..5af96cc63168 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/plugins/guided_conversation_agent.py @@ -0,0 +1,390 @@ +# Copyright (c) Microsoft. All rights reserved. 
# Copyright (c) Microsoft. All rights reserved.

from collections.abc import Callable
from dataclasses import dataclass, field
from enum import Enum
import logging

from pydantic import BaseModel
from semantic_kernel import Kernel

from guided_conversation.functions.conversation_plan import conversation_plan_function
from guided_conversation.functions.execution import end_conversation, execution, send_message
from guided_conversation.functions.final_update_plan import final_update_plan_function
from guided_conversation.plugins.agenda import Agenda
from guided_conversation.plugins.artifact import Artifact
from guided_conversation.utils.conversation_helpers import Conversation, ConversationMessageType
from guided_conversation.utils.openai_tool_calling import (
    ToolValidationResult,
    parse_function_result,
    validate_tool_calling,
)
from guided_conversation.utils.plugin_helpers import PluginOutput, format_kernel_functions_as_tools
from guided_conversation.utils.resources import GCResource, ResourceConstraint
from semantic_kernel.contents import ChatMessageContent, AuthorRole
from semantic_kernel.functions.kernel_function_decorator import kernel_function
from semantic_kernel.functions import KernelArguments

MAX_DECISION_RETRIES = 2


class ToolName(Enum):
    """Canonical plugin/tool names used to register and match kernel functions."""

    UPDATE_ARTIFACT_TOOL = "update_artifact_field"
    UPDATE_AGENDA_TOOL = "update_agenda"
    SEND_MSG_TOOL = "send_message_to_user"
    END_CONV_TOOL = "end_conversation"
    GENERATE_PLAN_TOOL = "generate_plan"
    EXECUTE_PLAN_TOOL = "execute_plan"
    FINAL_UPDATE_TOOL = "final_update"
    GUIDED_CONVERSATION_AGENT_TOOLBOX = "gc_agent"


@dataclass
class GCOutput:
    """The output of the GuidedConversation agent.

    Args:
        ai_message (str | None): The message to send to the user.
        is_conversation_over (bool): Whether the conversation is over.
    """

    ai_message: str | None = field(default=None)
    is_conversation_over: bool = field(default=False)


class GuidedConversation:
    def __init__(
        self,
        kernel: Kernel,
        artifact: BaseModel,
        rules: list[str],
        conversation_flow: str | None,
        context: str | None,
        resource_constraint: ResourceConstraint | None,
        service_id: str = "gc_main",
    ) -> None:
        """Initializes the GuidedConversation agent.

        Args:
            kernel (Kernel): An instance of Kernel. Must come initialized with an AzureOpenAI or OpenAI service.
            artifact (BaseModel): The artifact used as the goal/working memory/output of the conversation.
            rules (list[str]): The rules to be used in the guided conversation (dos and don'ts).
            conversation_flow (str | None): The conversation flow to be used in the guided conversation.
            context (str | None): The scene-setting for the conversation.
            resource_constraint (ResourceConstraint | None): The limit on conversation length (e.g. number of turns).
            service_id (str): A service_id associated with the kernel's configured service.
        """
        self.logger = logging.getLogger(__name__)
        self.kernel = kernel
        self.service_id = service_id

        self.conversation = Conversation()
        self.resource = GCResource(resource_constraint)
        self.artifact = Artifact(self.kernel, self.service_id, artifact)
        self.rules = rules
        self.conversation_flow = conversation_flow
        self.context = context
        self.agenda = Agenda(self.kernel, self.service_id, self.resource.get_resource_mode(), MAX_DECISION_RETRIES)

        # Regular plugins are executed in the order of this list.
        self.plugins_order = [
            ToolName.UPDATE_ARTIFACT_TOOL.value,
            ToolName.UPDATE_AGENDA_TOOL.value,
        ]

        # Terminal plugins end the conversation step:
        # - Only one terminal plugin is executed per step.
        # - List order is execution priority: if the model calls both send_message and
        #   end_conversation, send_message runs first and the step returns, so
        #   end_conversation never executes.
        self.terminal_plugins_order = [
            ToolName.SEND_MSG_TOOL.value,
            ToolName.END_CONV_TOOL.value,
        ]

        self.current_failed_decision_attempts = 0

        # Common request settings shared by plan/update prompts.
        self.req_settings = self.kernel.get_prompt_execution_settings_from_service_id(self.service_id)
        self.req_settings.max_tokens = 2000

        # Register the callable tools under their canonical plugin names.
        self.kernel.add_function(plugin_name=ToolName.SEND_MSG_TOOL.value, function=send_message)
        self.kernel.add_function(plugin_name=ToolName.END_CONV_TOOL.value, function=end_conversation)
        self.kernel.add_function(
            plugin_name=ToolName.UPDATE_ARTIFACT_TOOL.value, function=self.artifact.update_artifact_field
        )
        self.kernel.add_function(
            plugin_name=ToolName.UPDATE_AGENDA_TOOL.value, function=self.agenda.update_agenda_items
        )

        # Orchestrator entry points for the agent itself.
        self.kernel_function_generate_plan = self.kernel.add_function(
            plugin_name="gc_agent", function=self.generate_plan
        )
        self.kernel_function_execute_plan = self.kernel.add_function(plugin_name="gc_agent", function=self.execute_plan)
        self.kernel_function_final_update = self.kernel.add_function(plugin_name="gc_agent", function=self.final_update)

    async def step_conversation(self, user_input: str | None = None) -> GCOutput:
        """Given a message from a user, execute the agent until a terminal plugin is called
        or the maximum number of decision retries is reached.

        Args:
            user_input (str | None): The user's message for this turn, if any.

        Returns:
            GCOutput: The assistant message (if any) and whether the conversation ended.
        """
        self.logger.info(f"Starting conversation step {self.resource.turn_number}.")
        self.resource.start_resource()
        self.current_failed_decision_attempts = 0
        if user_input:
            self.conversation.add_messages(
                ChatMessageContent(
                    role=AuthorRole.USER,
                    content=user_input,
                    metadata={"turn_number": self.resource.turn_number, "type": ConversationMessageType.DEFAULT},
                )
            )

        # Keep generating and executing plans until a terminal plugin is called
        # or the maximum number of decision retries is reached.
        # NOTE(review): a successful plan that calls no terminal plugin re-plans without
        # incrementing the retry counter - confirm this cannot loop indefinitely.
        while self.current_failed_decision_attempts < MAX_DECISION_RETRIES:
            plan = await self.kernel.invoke(self.kernel_function_generate_plan)
            executed_plan = await self.kernel.invoke(
                self.kernel_function_execute_plan, KernelArguments(plan=plan.value)
            )
            success, plugins, terminal_plugins = executed_plan.value

            if success != ToolValidationResult.SUCCESS:
                self.logger.warning(
                    f"Failed to parse tools in plan on retry attempt {self.current_failed_decision_attempts} out of {MAX_DECISION_RETRIES}."
                )
                self.current_failed_decision_attempts += 1
                continue

            # First execute all regular plugins (if any) in the order returned by execute_plan.
            for plugin_name, plugin_args in plugins:
                if plugin_name == f"{ToolName.UPDATE_ARTIFACT_TOOL.value}-{ToolName.UPDATE_ARTIFACT_TOOL.value}":
                    plugin_args["conversation"] = self.conversation
                    # Remap the model's "field"/"value" args to update_artifact's parameter names.
                    plugin_args["field_name"] = plugin_args.pop("field")
                    plugin_args["field_value"] = plugin_args.pop("value")
                    await self._call_plugin(self.artifact.update_artifact, plugin_args)
                elif plugin_name == f"{ToolName.UPDATE_AGENDA_TOOL.value}-{ToolName.UPDATE_AGENDA_TOOL.value}":
                    plugin_args["remaining_turns"] = self.resource.get_remaining_turns()
                    plugin_args["conversation"] = self.conversation
                    await self._call_plugin(self.agenda.update_agenda, plugin_args)

            # Then execute the first terminal plugin (if any).
            if terminal_plugins:
                gc_output = GCOutput()
                plugin_name, plugin_args = terminal_plugins[0]
                if plugin_name == f"{ToolName.SEND_MSG_TOOL.value}-{ToolName.SEND_MSG_TOOL.value}":
                    gc_output.ai_message = plugin_args["message"]
                elif plugin_name == f"{ToolName.END_CONV_TOOL.value}-{ToolName.END_CONV_TOOL.value}":
                    await self.kernel.invoke(self.kernel_function_final_update)
                    gc_output.ai_message = "I will terminate this conversation now. Thank you for your time!"
                    gc_output.is_conversation_over = True
                self.resource.increment_resource()
                return gc_output

        # Maximum number of decision retries reached.
        self.logger.warning(f"Failed to execute plan after {MAX_DECISION_RETRIES} attempts.")
        self.resource.increment_resource()
        gc_output = GCOutput()
        gc_output.ai_message = "An error occurred and I must sadly end the conversation."
        gc_output.is_conversation_over = True
        return gc_output

    @kernel_function(
        name=ToolName.GENERATE_PLAN_TOOL.value,
        description="Generate a plan based on a time constraint for the current state of the conversation.",
    )
    async def generate_plan(self) -> str:
        """Generate a plan for the current state of the conversation.

        Letting the model plan explicitly before generating any plugin calls has been
        shown to increase reliability. The plan is recorded in the conversation as a
        REASONING message.

        Returns:
            str: The plan generated by the plan function.
        """
        self.logger.info("Generating plan for the current state of the conversation")
        plan = await conversation_plan_function(
            self.kernel,
            self.conversation,
            self.context,
            self.rules,
            self.conversation_flow,
            self.artifact,
            self.req_settings,
            self.resource,
            self.agenda,
        )
        plan = plan.value[0].content
        self.conversation.add_messages(
            ChatMessageContent(
                role=AuthorRole.ASSISTANT,
                content=plan,
                metadata={"turn_number": self.resource.turn_number, "type": ConversationMessageType.REASONING},
            )
        )
        return plan

    @kernel_function(
        name=ToolName.EXECUTE_PLAN_TOOL.value,
        description="Given the generated plan by the model, use that plan to generate which functions to execute.",
    )
    async def execute_plan(
        self, plan: str
    ) -> tuple[ToolValidationResult, list[tuple[str, dict]], list[tuple[str, dict]]]:
        """Given the generated plan, produce the tool calls to execute.

        The model's tool calls are validated and then sorted into two groups -
        regular plugins and terminal plugins - per the ordering defined in __init__.

        Args:
            plan (str): The plan generated by the model.

        Returns:
            tuple[ToolValidationResult, list[tuple[str, dict]], list[tuple[str, dict]]]:
            The validation result, the regular plugins to execute, and the terminal
            plugins to execute, each paired with its arguments.
        """
        self.logger.info("Executing plan.")

        req_settings = self.kernel.get_prompt_execution_settings_from_service_id(self.service_id)
        functions = self.plugins_order + self.terminal_plugins_order
        result = await execution(
            kernel=self.kernel,
            reasoning=plan,
            filter=functions,
            req_settings=req_settings,
            artifact_schema=self.artifact.get_schema_for_prompt(),
        )

        parsed_result = parse_function_result(result)
        formatted_tools = format_kernel_functions_as_tools(self.kernel, functions)
        validation_result = validate_tool_calling(parsed_result, formatted_tools)

        # Sort tool calls into the two groups, preserving the priority order from __init__.
        plugins = []
        terminal_plugins = []
        if validation_result == ToolValidationResult.SUCCESS:
            for plugin in self.plugins_order:
                plugin_name = f"{plugin}-{plugin}"
                for idx, called_plugin_name in enumerate(parsed_result["tool_names"]):
                    if called_plugin_name == plugin_name:
                        plugins.append((parsed_result["tool_names"][idx], parsed_result["tool_args_list"][idx]))

            for terminal_plugin in self.terminal_plugins_order:
                terminal_plugin_name = f"{terminal_plugin}-{terminal_plugin}"
                for idx, called_plugin_name in enumerate(parsed_result["tool_names"]):
                    if called_plugin_name == terminal_plugin_name:
                        terminal_plugins.append(
                            (parsed_result["tool_names"][idx], parsed_result["tool_args_list"][idx])
                        )

        return validation_result, plugins, terminal_plugins

    @kernel_function(
        name=ToolName.FINAL_UPDATE_TOOL.value,
        description="After the last message of a conversation was added to the conversation history, perform a final update of the artifact",
    )
    async def final_update(self):
        """Explicit final update of the artifact after the conversation ends."""
        self.logger.info("Final update of the artifact prior to terminating the conversation.")

        # Get a plan from the model.
        reasoning_response = await final_update_plan_function(
            kernel=self.kernel,
            req_settings=self.req_settings,
            chat_history=self.conversation,
            context=self.context,
            artifact_schema=self.artifact.get_schema_for_prompt(),
            artifact_state=self.artifact.get_artifact_for_prompt(),
        )

        # Then generate the functions to be executed.
        req_settings = self.kernel.get_prompt_execution_settings_from_service_id(self.service_id)
        functions = [ToolName.UPDATE_ARTIFACT_TOOL.value]
        execution_response = await execution(
            kernel=self.kernel,
            reasoning=reasoning_response.value[0].content,
            filter=functions,
            req_settings=req_settings,
            artifact_schema=self.artifact.get_schema_for_prompt(),
        )

        parsed_result = parse_function_result(execution_response)
        formatted_tools = format_kernel_functions_as_tools(self.kernel, functions)
        validation_result = validate_tool_calling(parsed_result, formatted_tools)

        if validation_result != ToolValidationResult.SUCCESS:
            self.logger.warning(f"No artifact change during final update due to: {validation_result.value}")
            return

        for tool_name, tool_args in zip(parsed_result["tool_names"], parsed_result["tool_args_list"]):
            if (
                tool_name == f"{ToolName.UPDATE_ARTIFACT_TOOL.value}-{ToolName.UPDATE_ARTIFACT_TOOL.value}"
                and "field" in tool_args
                and "value" in tool_args
            ):
                # Fixed: the original guarded on "field"/"value" but then indexed
                # tool_args["field_name"]/["field_value"], which always raised KeyError.
                plugin_output = await self.artifact.update_artifact(
                    field_name=tool_args["field"],
                    field_value=tool_args["value"],
                    conversation=self.conversation,
                )
                if plugin_output.update_successful:
                    self.logger.info(f"Artifact field {tool_args['field']} successfully updated.")
                    # Fixed: set metadata["turn_number"] (as done everywhere else) rather
                    # than a non-existent message.turn_number attribute.
                    for message in plugin_output.messages:
                        message.metadata["turn_number"] = self.resource.turn_number
                    self.conversation.add_messages(plugin_output.messages)
                else:
                    self.logger.error(f"Final artifact field update of {tool_args['field']} failed.")

    def to_json(self) -> dict:
        """Serialize the agent's mutable state (artifact, agenda, history, resource)."""
        return {
            "artifact": self.artifact.to_json(),
            "agenda": self.agenda.to_json(),
            "chat_history": self.conversation.to_json(),
            "resource": self.resource.to_json(),
        }

    async def _call_plugin(self, plugin_function: Callable, plugin_args: dict):
        """Common logic for any plugin call: error accounting and history updates."""
        self.logger.info(f"Calling plugin {plugin_function.__name__}.")
        output: PluginOutput = await plugin_function(**plugin_args)
        if output.update_successful:
            # Stamp the produced messages with the current turn number.
            for message in output.messages:
                message.metadata["turn_number"] = self.resource.turn_number
            self.conversation.add_messages(output.messages)
        else:
            self.logger.warning(
                f"Plugin {plugin_function.__name__} failed to execute on attempt {self.current_failed_decision_attempts} out of {MAX_DECISION_RETRIES}."
            )
            self.current_failed_decision_attempts += 1

    @classmethod
    def from_json(
        cls,
        json_data: dict,
        kernel: Kernel,
        service_id: str = "gc_main",
        input_artifact: BaseModel | None = None,
        rules: list[str] | None = None,
        conversation_flow: str | None = None,
        context: str | None = None,
        resource_constraint: ResourceConstraint | None = None,
    ) -> "GuidedConversation":
        """Reconstruct a GuidedConversation from to_json() output.

        Fixed: the original referenced non-existent class attributes (cls.artifact,
        cls.resource_constraint) and called the constructor with a mismatched argument
        list, so it always raised. The artifact template and conversation definition are
        not part of to_json() output, so the caller must supply them (new, defaulted
        keyword parameters - backward-compatible with the old signature).

        Raises:
            ValueError: If input_artifact is not provided.
        """
        if input_artifact is None:
            raise ValueError("input_artifact is required to rebuild the artifact from JSON.")

        gc = cls(
            kernel=kernel,
            artifact=input_artifact,
            rules=rules or [],
            conversation_flow=conversation_flow,
            context=context,
            resource_constraint=resource_constraint,
            service_id=service_id,
        )
        gc.artifact = Artifact.from_json(
            json_data["artifact"],
            kernel=kernel,
            service_id=service_id,
            input_artifact=input_artifact,
            max_artifact_field_retries=MAX_DECISION_RETRIES,
        )
        gc.agenda = Agenda.from_json(
            json_data["agenda"],
            kernel=kernel,
            service_id=service_id,
            resource_constraint_mode=gc.resource.get_resource_mode(),
        )
        gc.conversation = Conversation.from_json(json_data["chat_history"])
        gc.resource = GCResource.from_json(json_data["resource"])
        return gc
# Copyright (c) Microsoft. All rights reserved.

import ast
from types import NoneType
from typing import get_args

from pydantic import BaseModel, ValidationInfo, field_validator


class BaseModelLLM(BaseModel):
    """A Pydantic base class for use when an LLM is completing fields.

    Provides a before-validator that coerces the LLM's string output into the declared
    field types, plus a config that re-validates on assignment and forbids extra fields.
    """

    @field_validator("*", mode="before")
    def parse_literal_eval(cls, value: str, info: ValidationInfo):  # noqa: N805
        """An LLM always produces a string (e.g. '["x", "y"]'); parse it to the declared type."""
        annotation = cls.model_fields[info.field_name].annotation
        typehints = get_args(annotation)
        if len(typehints) == 0:
            typehints = [annotation]

        # Fields allowing NoneType usually pair it with another hint (e.g. str | None);
        # map the literal string "None" to None so it doesn't survive as a string.
        if (NoneType in typehints) and (value == "None"):
            return None

        # If the field allows strings, don't parse - otherwise a validation error might
        # be raised, e.g. phone_number = "1234567890" should not become an int.
        if str in typehints:
            return value
        try:
            return ast.literal_eval(value)
        except Exception:
            # Not a Python literal; hand the raw string to Pydantic's own validation.
            return value

    class Config:
        # Validate every assignment, not just construction.
        validate_assignment = True
        # Do not allow extra fields to be added to the artifact.
        extra = "forbid"


# Copyright (c) Microsoft. All rights reserved.

from dataclasses import dataclass, field
import datetime
from enum import Enum
import logging
from typing import Union

from semantic_kernel.contents import ChatMessageContent


class ConversationMessageType(Enum):
    """Tags messages so prompt construction can include/exclude categories."""

    DEFAULT = "default"
    ARTIFACT_UPDATE = "artifact-update"
    REASONING = "reasoning"


@dataclass
class Conversation:
    """A list of messages with common operations: appending, prompt representation,
    turn stamping, and JSON (de)serialization.

    Args:
        conversation_messages (list[ChatMessageContent]): The accumulated messages.
    """

    logger = logging.getLogger(__name__)
    conversation_messages: list[ChatMessageContent] = field(default_factory=list)

    def add_messages(self, messages: Union[ChatMessageContent, list[ChatMessageContent], "Conversation", None]) -> None:
        """Add a message or list of messages, or merge another conversation onto the end.

        Args:
            messages: The message(s) to add; all are appended to the end.

        Returns:
            None
        """
        if isinstance(messages, list):
            self.conversation_messages.extend(messages)
        elif isinstance(messages, Conversation):
            self.conversation_messages.extend(messages.conversation_messages)
        elif isinstance(messages, ChatMessageContent):
            # Default the message type if the caller didn't set one.
            if "type" not in messages.metadata:
                messages.metadata["type"] = ConversationMessageType.DEFAULT
            self.conversation_messages.append(messages)
        else:
            self.logger.warning(f"Invalid message type: {type(messages)}")
        return None

    def get_repr_for_prompt(
        self,
        exclude_types: list[ConversationMessageType] | None = None,
    ) -> str:
        """Create a string representation of the conversation history for LLM prompts.

        Args:
            exclude_types: Message types to exclude; None includes everything.

        Returns:
            str: The formatted history, or "None" when there are no messages.
        """
        if len(self.conversation_messages) == 0:
            return "None"

        # Filter out excluded message types.
        if exclude_types is not None:
            conversation_messages = [
                message
                for message in self.conversation_messages
                if "type" in message.metadata and message.metadata["type"] not in exclude_types
            ]
        else:
            conversation_messages = self.conversation_messages

        to_join = []
        current_turn = None
        for message in conversation_messages:
            participant_name = message.name
            # Capitalize the default user name for consistency with "Assistant".
            if participant_name == "user":
                participant_name = "User"

            # Emit a turn header whenever the turn number changes (None turns get none).
            if "turn_number" in message.metadata and current_turn != message.metadata["turn_number"]:
                current_turn = message.metadata["turn_number"]
                to_join.append(f"[Turn {current_turn}]")

            # Artifact-update messages are already self-describing; others get a speaker prefix.
            if (message.role == "assistant") and (
                "type" in message.metadata and message.metadata["type"] == ConversationMessageType.ARTIFACT_UPDATE
            ):
                to_join.append(message.content)
            elif message.role == "assistant":
                to_join.append(f"Assistant: {message.content}")
            else:
                user_string = message.content.strip()
                if user_string == "":
                    to_join.append(f"{participant_name}: ")
                else:
                    to_join.append(f"{participant_name}: {user_string}")
        return "\n".join(to_join)

    def set_turn_numbers(self, turn_number: int) -> None:
        """Set the turn number of every message in the conversation.

        Args:
            turn_number (int): The turn number to set for all messages.

        Returns:
            None
        """
        for message in self.conversation_messages:
            message.metadata["turn_number"] = turn_number

    @staticmethod
    def message_to_json(message: ChatMessageContent) -> dict:
        """Convert a ChatMessageContent object to a JSON-serializable dictionary.

        Fixed: originally declared without `self` yet called as self.message_to_json(...),
        which raised TypeError; now a staticmethod. The message type is stored as the
        enum's value so the dict is actually JSON-serializable.

        Args:
            message (ChatMessageContent): The message to convert.

        Returns:
            dict: A JSON-serializable representation of the message.
        """
        msg_type = message.metadata.get("type", ConversationMessageType.DEFAULT)
        if isinstance(msg_type, ConversationMessageType):
            msg_type = msg_type.value
        # NOTE(review): message.role may be an AuthorRole enum; confirm downstream JSON
        # encoding handles it (SK roles are str-valued) or coerce with .value if needed.
        return {
            "role": message.role,
            "content": message.content,
            "name": message.name,
            "metadata": {
                "turn_number": message.metadata.get("turn_number"),
                "type": msg_type,
            },
        }

    def to_json(self) -> dict:
        """Serialize all messages under a "conversation" envelope (see from_json)."""
        return {
            "conversation": {
                "conversation_messages": [
                    self.message_to_json(message) for message in self.conversation_messages
                ]
            }
        }

    @classmethod
    def from_json(
        cls,
        json_data: dict,
    ) -> "Conversation":
        """Reconstruct a Conversation from to_json() output.

        Fixed: the original read top-level "turn_number"/"timestamp" keys that to_json
        never writes (turn_number/type are nested under "metadata" and timestamps are
        not serialized), so round-tripping always raised KeyError.
        """
        conversation = cls()
        for message in json_data["conversation"]["conversation_messages"]:
            metadata = message["metadata"]
            conversation.add_messages(
                ChatMessageContent(
                    role=message["role"],
                    content=message["content"],
                    name=message["name"],
                    metadata={
                        "turn_number": metadata["turn_number"],
                        "type": ConversationMessageType(metadata["type"]),
                    },
                )
            )
        return conversation


# Copyright (c) Microsoft. All rights reserved.
+ +from dataclasses import dataclass +from enum import Enum +import json +import logging +from typing import Any + +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.functions import FunctionResult + +logger = logging.getLogger(__name__) + + +@dataclass +class ToolArg: + argument_name: str + required: bool + + +@dataclass +class Tool: + name: str + args: list[ToolArg] + + +class ToolValidationResult(Enum): + NO_TOOL_CALLED = "No tool was called" + INVALID_TOOL_CALLED = "A tool was called with an unexpected name" + MISSING_REQUIRED_ARGUMENT = "The tool called is missing a required argument" + INVALID_ARGUMENT_TYPE = "The value of an argument is of an unexpected type" + SUCCESS = "success" + + +def parse_function_result(response: FunctionResult) -> dict[str, Any]: + """Parse the response from SK's FunctionResult object into only the relevant data for easier downstream processing. + This should only be used when you expect the response to contain tool calls. + + Args: + response (FunctionResult): The response from the kernel. + + Returns: + dict[str, Any]: The parsed response data with the following format if n was set greater than 1: + { + "choices": [ + { + "tool_names": list[str], + "tool_args_list": list[dict[str, Any]], + "message": str, + "finish_reason": str, + "validation_result": ToolValidationResult + }, ... 
+ ] + } + Otherwise, the response will directly contain the data from the first choice (tool_names, etc keys) + """ + response_data: dict[str, Any] = {"choices": []} + for response_choice in response.value: + response_data_curr = {} + finish_reason = response_choice.finish_reason + + if finish_reason == "tool_calls": + tool_names = [] + tool_args_list = [] + # Only look at the items that are of instance `FunctionCallContent` + tool_calls = [item for item in response_choice.items if isinstance(item, FunctionCallContent)] + for tool_call in tool_calls: + if "-" not in tool_call.name: + logger.info(f"Tool call name {tool_call.name} does not match naming convention - modifying name.") + tool_names.append(tool_call.name + "-" + tool_call.name) + else: + tool_names.append(tool_call.name) + try: + tool_args = json.loads(tool_call.arguments) + except json.JSONDecodeError: + logger.warning(f"Failed to parse tool arguments for tool call {tool_call.name}. Using empty dict.") + tool_args = {} + tool_args_list.append(tool_args) + response_data_curr["tool_names"] = tool_names + response_data_curr["tool_args_list"] = tool_args_list + + response_data_curr["message"] = response_choice.content + response_data_curr["finish_reason"] = finish_reason + response_data["choices"].append(response_data_curr) + + if len(response_data["choices"]) == 1: + response_data.update(response_data["choices"][0]) + del response_data["choices"] + + return response_data + + +def construct_tool_objects(kernel_function_tools: dict) -> list[Tool]: + """Construct a list of Tool objects from the kernel function tools definition. + + Args: + kernel_function_tools (dict): The definition of tools done by the kernel function. + + Returns: + list[Tool]: The list of Tool objects constructed from the kernel function tools definition. 
+ """ + + tool_objects: list[Tool] = [] + for tool_definition in kernel_function_tools: + tool_name = tool_definition["function"]["name"] + tool_args = tool_definition["function"]["parameters"]["properties"] + + tool_arg_objects: list[ToolArg] = [] + for argument_name, _ in tool_args.items(): + tool_arg = ToolArg(argument_name=argument_name, required=False) + tool_arg_objects.append(tool_arg) + + required_args = tool_definition["function"]["parameters"]["required"] + for tool_arg_object in tool_arg_objects: + if tool_arg_object.argument_name in required_args: + tool_arg_object.required = True + + tool_objects.append(Tool(name=tool_name, args=tool_arg_objects)) + return tool_objects + + +def validate_tool_calling(response: dict[str, Any], request_tool_param: dict) -> ToolValidationResult: + """Validate that the response from the LLM called tools corrected. + 1. Check if any tool was called. + 2. Check if the tools called were valid (names match) + 3. Check if all the required arguments were passed. + + Args: + response (dict[str, Any]): The response from the LLM containing the tools called (output of parse_function_result) + tools (list[Tool]): The list of tools that can be called by the model. + + Returns: + ToolValidationResult: The result of the validation. ToolValidationResult.SUCCESS if the validation passed. + """ + + tool_objects = construct_tool_objects(request_tool_param) + tool_names = response.get("tool_names", []) + tool_args_list = response.get("tool_args_list", []) + + # Check if any tool was called. + if not tool_names: + logger.info("No tool was called.") + return ToolValidationResult.NO_TOOL_CALLED + + for tool_name, tool_args in zip(tool_names, tool_args_list, strict=True): + # Check the tool names is valid. 
+ tool: Tool | None = next((t for t in tool_objects if t.name == tool_name), None) + if not tool: + logger.warning(f"Invalid tool called: {tool_name}") + return ToolValidationResult.INVALID_TOOL_CALLED + + for arg in tool.args: + # Check if the required arguments were passed. + if arg.required and arg.argument_name not in tool_args: + logger.warning(f"Missing required argument '{arg.argument_name}' for tool '{tool_name}'.") + return ToolValidationResult.MISSING_REQUIRED_ARGUMENT + + return ToolValidationResult.SUCCESS diff --git a/python/samples/demos/guided_conversations/guided_conversation/utils/plugin_helpers.py b/python/samples/demos/guided_conversations/guided_conversation/utils/plugin_helpers.py new file mode 100644 index 000000000000..f9a5739c1edb --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/utils/plugin_helpers.py @@ -0,0 +1,116 @@ +# Copyright (c) Microsoft. All rights reserved. + +from dataclasses import dataclass + +from pydantic import ValidationError +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.function_calling_utils import kernel_function_metadata_to_function_call_format +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.functions import KernelArguments +from semantic_kernel.contents import ChatMessageContent + +from guided_conversation.utils.openai_tool_calling import parse_function_result, validate_tool_calling + + +@dataclass +class PluginOutput: + """A wrapper for all Guided Conversation Plugins. This class is used to return the output of a generic plugin. + + Args: + update_successful (bool): Whether the update was successful. + messages (list[ChatMessageContent]): A list of messages to be used at the user's digression, it + contains information about the process of calling the plugin. 
+ """ + + update_successful: bool + messages: list[ChatMessageContent] + + +def format_kernel_functions_as_tools(kernel: Kernel, functions: list[str]): + """Format kernel functions as JSON schemas for custom validation.""" + formatted_tools = [] + for _, kernel_plugin_def in kernel.plugins.items(): + for function_name, function_def in kernel_plugin_def.functions.items(): + if function_name in functions: + func_metadata = function_def.metadata + formatted_tools.append(kernel_function_metadata_to_function_call_format(func_metadata)) + return formatted_tools + + +async def fix_error( + kernel: Kernel, prompt_template: str, req_settings: AzureChatCompletion, arguments: KernelArguments +) -> dict: + """Invokes the error correction plugin. If a plugin is called & fails during execution, this function will retry + the plugin. At a high level, we recommend the following steps when calling a plugin: + 1. Call the plugin. + 2. Parse the response. + 3. Validate the response. + 4. If the response is invalid (Validation or Value Error), retry the plugin by calling *this function*. For best + results, check out plugins/agenda.py or plugins/artifact.py for examples of prompt templates & corresponding + tools (which should be passed in the req_settings object). This function will handle the retry logic for you. + + Args: + kernel (Kernel): The kernel object. + prompt_template (str): The prompt template for the plugin. + req_settings (AzureChatCompletion): The prompt execution settings. + arguments (KernelArguments): The kernel arguments. + + Returns: + dict: The result of the plugin call. 
+ """ + kernel_function_obj = kernel.add_function( + prompt=prompt_template, + function_name="error_correction", + plugin_name="error_correction", + template_format="handlebars", + prompt_execution_settings=req_settings, + ) + + result = await kernel.invoke(function=kernel_function_obj, arguments=arguments) + parsed_result = parse_function_result(result) + + formatted_tools = [] + for _, kernel_plugin_def in kernel.plugins.items(): + for _, function_def in kernel_plugin_def.functions.items(): + func_metadata = function_def.metadata + formatted_tools.append(kernel_function_metadata_to_function_call_format(func_metadata)) + + # Add any tools from req_settings + if req_settings.tools: + formatted_tools.extend(req_settings.tools) + + validation_result = validate_tool_calling(parsed_result, formatted_tools) + parsed_result["validation_result"] = validation_result + return parsed_result + + +def update_attempts(error: Exception, attempt_id: str, previous_attempts: list) -> str: + """ + Updates the plugin class attribute list of previous attempts with the current attempt and error message + (including duplicates). + + Args: + error (Exception): The error object. + attempt_id (str): The ID of the current attempt. + previous_attempts (list): The list of previous attempts. + + Returns: + str: A formatted (optimized for LLM performance) string of previous attempts, with duplicates removed. 
+ """ + if isinstance(error, ValidationError): + error_str = "; ".join([e.get("msg") for e in error.errors()]) + # replace "; Input should be 'Unanswered'" with " or input should be 'Unanswered'" for clarity + error_str = error_str.replace("; Input should be 'Unanswered'", " or input should be 'Unanswered'") + else: + error_str = str(error) + new_failed_attempt = (attempt_id, error_str) + previous_attempts.append(new_failed_attempt) + + # Format previous attempts to be more friendly for the LLM + attempts_list = [] + unique_attempts = set(previous_attempts) + for attempt, error in unique_attempts: + attempts_list.append(f"Attempt: {attempt}\nError: {error}") + llm_formatted_attempts = "\n".join(attempts_list) + + return previous_attempts, llm_formatted_attempts diff --git a/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py new file mode 100644 index 000000000000..14cf65431911 --- /dev/null +++ b/python/samples/demos/guided_conversations/guided_conversation/utils/resources.py @@ -0,0 +1,251 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +import logging +import math +import time + +from pydantic import BaseModel + + +class ResourceConstraintUnit(Enum): + """Choose the unit of the resource constraint. + Seconds and Minutes are real-time and will be impacted by the latency of the model.""" + + SECONDS = "seconds" + MINUTES = "minutes" + TURNS = "turns" + + +class ResourceConstraintMode(Enum): + """Choose how the agent should use the resource. + Maximum: is an upper bound, i.e. the agent can end the conversation before the resource is exhausted + Exact: the agent should aim to use exactly the given amount of the resource""" + + MAXIMUM = "maximum" + EXACT = "exact" + + +class ResourceConstraint(BaseModel): + """A structured representation of the resource constraint for the GuidedConversation agent. 
+ + Args: + quantity (float | int): The quantity of the resource constraint. + unit (ResourceConstraintUnit): The unit of the resource constraint. + mode (ResourceConstraintMode): The mode of the resource constraint. + """ + + quantity: float | int + unit: ResourceConstraintUnit + mode: ResourceConstraintMode + + class Config: + arbitrary_types_allowed = True + + +def format_resource(quantity: float, unit: ResourceConstraintUnit) -> str: + """Get formatted string for a given quantity and unit (e.g. 1 second, 20 seconds)""" + if unit != ResourceConstraintUnit.TURNS: + quantity = round(quantity, 1) + unit = unit.value + return f"{quantity} {unit[:-1] if quantity == 1 else unit}" + + +class GCResource: + """Resource constraints for the GuidedConversation agent. This class is used to keep track of the resource + constraints. If resource_constraint is None, then the agent can continue indefinitely. This also means + that no agenda will be created for the conversation. + + Args: + resource_constraint (ResourceConstraint | None): The resource constraint for the conversation. + initial_seconds_per_turn (int): The initial number of seconds per turn. Defaults to 120 seconds. 
+ """ + + def __init__( + self, + resource_constraint: ResourceConstraint | None, + initial_seconds_per_turn: int = 120, + ): + logger = logging.getLogger(__name__) + self.logger = logger + self.resource_constraint: ResourceConstraint | None = resource_constraint + self.initial_seconds_per_turn: int = initial_seconds_per_turn + + self.turn_number: int = 0 + self.remaining_units: float | None = None + self.elapsed_units: float | None = None + + if resource_constraint is not None: + self.elapsed_units = 0 + self.remaining_units = resource_constraint.quantity + + def start_resource(self) -> None: + """To be called at the start of a conversation turn""" + if self.resource_constraint is not None and ( + self.resource_constraint.unit == ResourceConstraintUnit.SECONDS + or self.resource_constraint.unit == ResourceConstraintUnit.MINUTES + ): + self.start_time = time.time() + + def increment_resource(self) -> None: + """Increment the resource counter by one turn.""" + if self.resource_constraint is not None: + if self.resource_constraint.unit == ResourceConstraintUnit.SECONDS: + self.elapsed_units += time.time() - self.start_time + self.remaining_units = self.resource_constraint.quantity - self.elapsed_units + elif self.resource_constraint.unit == ResourceConstraintUnit.MINUTES: + self.elapsed_units += (time.time() - self.start_time) / 60 + self.remaining_units = self.resource_constraint.quantity - self.elapsed_units + elif self.resource_constraint.unit == ResourceConstraintUnit.TURNS: + self.elapsed_units += 1 + self.remaining_units -= 1 + + self.turn_number += 1 + + def get_resource_mode(self) -> ResourceConstraintMode: + """Get the mode of the resource constraint. + + Returns: + ResourceConstraintMode | None: The mode of the resource constraint, or None if there is no + resource constraint. 
+ """ + return self.resource_constraint.mode if self.resource_constraint is not None else None + + def get_elapsed_turns(self, formatted_repr: bool = False) -> str | int: + """Get the number of elapsed turns. + + Args: + formatted_repr (bool): If true, return a formatted string representation of the elapsed turns. + If false, return an integer. Defaults to False. + + Returns: + str | int: The description/number of elapsed turns. + """ + if formatted_repr: + return format_resource(self.turn_number, ResourceConstraintUnit.TURNS) + else: + return self.turn_number + + def get_remaining_turns(self, formatted_repr: bool = False) -> str | int: + """Get the number of remaining turns. + + Args: + formatted_repr (bool): If true, return a formatted string representation of the remaining turns. + + Returns: + str | int: The description/number of remaining turns. + """ + if formatted_repr: + return format_resource(self.estimate_remaining_turns(), ResourceConstraintUnit.TURNS) + else: + return self.estimate_remaining_turns() + + def estimate_remaining_turns(self) -> int: + """Estimate the remaining turns based on the resource constraint, thereby translating certain + resource units (e.g. seconds, minutes) into turns. + + Returns: + int: The estimated number of remaining turns. 
+ """ + if self.resource_constraint is not None: + if ( + self.resource_constraint.unit == ResourceConstraintUnit.SECONDS + or self.resource_constraint.unit == ResourceConstraintUnit.MINUTES + ): + elapsed_turns = self.turn_number + + # TODO: This can likely be simplified + if self.resource_constraint.unit == ResourceConstraintUnit.MINUTES: + time_per_turn = ( + self.initial_seconds_per_turn + if elapsed_turns == 0 + else (self.elapsed_units * 60) / elapsed_turns + ) + time_per_turn /= 60 + else: + time_per_turn = ( + self.initial_seconds_per_turn if elapsed_turns == 0 else self.elapsed_units / elapsed_turns + ) + remaining_turns = self.remaining_units / time_per_turn + + # Round down, unless it's less than 1, in which case round up + remaining_turns = math.ceil(remaining_turns) if remaining_turns < 1 else math.floor(remaining_turns) + return remaining_turns + elif self.resource_constraint.unit == ResourceConstraintUnit.TURNS: + return self.resource_constraint.quantity - self.turn_number + else: + self.logger.error( + "Resource constraint is not set, so turns cannot be estimated using function estimate_remaining_turns" + ) + raise ValueError( + "Resource constraint is not set. Do not try to call this method without a resource constraint." + ) + + def get_resource_instructions(self) -> tuple[str, str]: + """Get the resource instructions for the conversation. + + Assumes we're always using turns as the resource unit. + + Returns: + str: the resource instructions + """ + if self.resource_constraint is None: + return "" + + formatted_elapsed_resource = format_resource(self.elapsed_units, ResourceConstraintUnit.TURNS) + formatted_remaining_resource = format_resource(self.remaining_units, ResourceConstraintUnit.TURNS) + + # if the resource quantity is anything other than 1, the resource unit should be plural (e.g. 
"minutes" instead of "minute") + is_plural_elapsed = self.elapsed_units != 1 + is_plural_remaining = self.remaining_units != 1 + + if self.elapsed_units > 0: + resource_instructions = f"So far, {formatted_elapsed_resource} {'have' if is_plural_elapsed else 'has'} elapsed since the conversation began. " + else: + resource_instructions = "" + + if self.resource_constraint.mode == ResourceConstraintMode.EXACT: + exact_mode_instructions = f"""There {'are' if is_plural_remaining else 'is'} {formatted_remaining_resource} remaining (including this one) - the conversation will automatically terminate when 0 turns are left. \ +You should continue the conversation until it is automatically terminated. This means you should NOT preemptively end the conversation, \ +either explicitly (by selecting the "End conversation" action) or implicitly (e.g. by telling the user that you have all required information and they should wait for the next step). \ +Your goal is not to maximize efficiency (i.e. complete the artifact as quickly as possible then end the conversation), but rather to make the best use of ALL remaining turns available to you""" + + if is_plural_remaining: + resource_instructions += f"""{exact_mode_instructions}. This will require you to plan your actions carefully using the agenda: you want to avoid the situation where you have to pack too many topics into the final turns because you didn't account for them earlier, \ +or where you've rushed through the conversation and all fields are completed but there are still many turns left.""" + + # special instruction for the final turn (i.e. 1 remaining) in exact mode + else: + resource_instructions += f"""{exact_mode_instructions}, including this one. Therefore, you should use this turn to ask for any remaining information needed to complete the artifact, \ + or, if the artifact is already completed, continue to broaden/deepen the discussion in a way that's directly relevant to the artifact. 
Do NOT indicate to the user that the conversation is ending.""" + + elif self.resource_constraint.mode == ResourceConstraintMode.MAXIMUM: + resource_instructions += f"""You have a maximum of {formatted_remaining_resource} (including this one) left to complete the conversation. \ +You can decide to terminate the conversation at any point (including now), otherwise the conversation will automatically terminate when 0 turns are left. \ +You will need to plan your actions carefully using the agenda: you want to avoid the situation where you have to pack too many topics into the final turns because you didn't account for them earlier.""" + + else: + self.logger.error("Invalid resource mode provided.") + + return resource_instructions + + def to_json(self) -> dict: + return { + "turn_number": self.turn_number, + "remaining_units": self.remaining_units, + "elapsed_units": self.elapsed_units, + } + + @classmethod + def from_json( + cls, + json_data: dict, + ) -> "GCResource": + gc_resource = cls( + resource_constraint=None, + initial_seconds_per_turn=120, + ) + gc_resource.turn_number = json_data["turn_number"] + gc_resource.remaining_units = json_data["remaining_units"] + gc_resource.elapsed_units = json_data["elapsed_units"] + return gc_resource diff --git a/python/samples/demos/guided_conversations/interactive_guided_conversation.py b/python/samples/demos/guided_conversations/interactive_guided_conversation.py new file mode 100644 index 000000000000..d08e8782d693 --- /dev/null +++ b/python/samples/demos/guided_conversations/interactive_guided_conversation.py @@ -0,0 +1,125 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Run this interactive guided conversation script to test out the teaching scenario! +The teaching artifact, rules, conversation flow, context, and resource constraint can all be modified to +fit your needs & try out new scenarios! 
+""" + +import asyncio + +from pydantic import BaseModel, Field +from semantic_kernel import Kernel +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion + +from guided_conversation.plugins.guided_conversation_agent import GuidedConversation +from guided_conversation.utils.resources import ResourceConstraint, ResourceConstraintMode, ResourceConstraintUnit + + +# Artifact - The artifact is like a form that the agent must complete throughout the conversation. +# It can also be thought of as a working memory for the agent. +# We allow any valid Pydantic BaseModel class to be used. +class MyArtifact(BaseModel): + student_poem: str = Field(description="The acrostic poem written by the student.") + initial_feedback: str = Field(description="Feedback on the student's final revised poem.") + final_feedback: str = Field(description="Feedback on how the student was able to improve their poem.") + inappropriate_behavior: list[str] = Field( + description="""List any inappropriate behavior the student attempted while chatting with you. \ +It is ok to leave this field Unanswered if there was none.""" + ) + + +# Rules - These are the do's and don'ts that the agent should follow during the conversation. +rules = [ + "DO NOT write the poem for the student." + "Terminate the conversation immediately if the students asks for harmful or inappropriate content.", +] + +# Conversation Flow (optional) - This defines in natural language the steps of the conversation. +conversation_flow = """1. Start by explaining interactively what an acrostic poem is. +2. Then give the following instructions for how to go ahead and write one: + 1. Choose a word or phrase that will be the subject of your acrostic poem. + 2. Write the letters of your chosen word or phrase vertically down the page. + 3. Think of a word or phrase that starts with each letter of your chosen word or phrase. + 4. Write these words or phrases next to the corresponding letters to create your acrostic poem. +3. 
Then give the following example of a poem where the word or phrase is HAPPY: + Having fun with friends all day, + Awesome games that we all play. + Pizza parties on the weekend, + Puppies we bend down to tend, + Yelling yay when we win the game +4. Finally have the student write their own acrostic poem using the word or phrase of their choice. Encourage them to be creative and have fun with it. +After they write it, you should review it and give them feedback on what they did well and what they could improve on. +Have them revise their poem based on your feedback and then review it again. +""" + +# Context (optional) - This is any additional information or the circumstances the agent is in that it should be aware of. +# It can also include the high level goal of the conversation if needed. +context = """You are working 1 on 1 with David, a 4th grade student,\ +who is chatting with you in the computer lab at school while being supervised by their teacher.""" + + +# Resource Constraints (optional) - This defines the constraints on the conversation such as time or turns. +# It can also help with pacing the conversation, +# For example, here we have set an exact time limit of 10 turns which the agent will try to fill. +resource_constraint = ResourceConstraint( + quantity=10, + unit=ResourceConstraintUnit.TURNS, + mode=ResourceConstraintMode.EXACT, +) + + +async def main() -> None: + """Main function to interactively run a guided conversation. + + The user can chat with this teaching agent until: + 1. The user types 'exit' to end the conversation. + 2. There's a KeyboardInterrupt or EOFError. + 3. The conversation ends. This can be due to the agent ending the conversation, which will happen if the resource constraint is met, the artifact is complete, or the conversation just isn't making progress (user is not cooperative). 
+ """ + + kernel = Kernel() + service_id = "gc_main" + chat_service = AzureChatCompletion( + service_id=service_id, + deployment_name="gpt-4o-2024-05-13", + api_version="2024-05-01-preview", + ) + kernel.add_service(chat_service) + + guided_conversation_agent = GuidedConversation( + kernel=kernel, + artifact=MyArtifact, + conversation_flow=conversation_flow, + context=context, + rules=rules, + resource_constraint=resource_constraint, + service_id=service_id, + ) + + # Step the conversation to start the conversation with the agent + result = await guided_conversation_agent.step_conversation() + print(f"Assistant: {result.ai_message}") + + while True: + try: + # Get user input + user_input = input("User: ") + except KeyboardInterrupt: + print("\n\nExiting chat...") + return + except EOFError: + print("\n\nExiting chat...") + return + if user_input == "exit": + print("\n\nExiting chat...") + return + else: + # Step the conversation to get the agent's reply + result = await guided_conversation_agent.step_conversation(user_input=user_input) + print(f"Assistant: {result.ai_message}") + if result.is_conversation_over: + return + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/demos/guided_conversations/notebooks/01_guided_conversation_teaching.ipynb b/python/samples/demos/guided_conversations/notebooks/01_guided_conversation_teaching.ipynb new file mode 100644 index 000000000000..901059fdc367 --- /dev/null +++ b/python/samples/demos/guided_conversations/notebooks/01_guided_conversation_teaching.ipynb @@ -0,0 +1,651 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Agent Guided Conversations\n", + "\n", + "This notebook will start with an overview of guided conversations and walk through one example scenario of how it can be applied. 
Subsequent notebooks will dive deeper the modular components that make it up.\n", + "\n", + "## Motivating Example - Education\n", + "\n", + "We focus on an elementary education scenario. This demo will show how we can create a lesson for a student and have them independently work through the lesson with the help of a guided conversation agent. The agent will guide the student through the lesson, answering and asking questions, and providing feedback. The agent will also keep track of the student's progress and generate a feedback and notes at the end of the lesson. We highlight how the agent is able to follow a conversation flow, whilst still being able to exercise judgement to answer and keeping the conversation on track over multiple turns. Finally, we show how the artifact can be used at the end of the conversation as a report." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Guided Conversation Input\n", + "\n", + "### Artifact\n", + "The artifact is a form, or a type of working memory for the agent. We implement it using a Pydantic BaseModel. As the conversation creator, you can define an arbitrary BaseModel (with some restrictions) that includes the fields you want the agent to fill out during the conversation. \n", + "\n", + "### Rules\n", + "Rules is a list of *do's and don'ts* that the agent should attempt to follow during the conversation. \n", + "\n", + "### Conversation Flow (optional)\n", + "Conversation flow is a loose natural language description of the steps of the conversation. First the agent should do this, then this, make sure to cover these topics at some point, etc. 
\n", + "This field is optional as the artifact could be treated as a conversation flow.\n", + "Use this if you want to provide more details or it is difficult to represent using the artifact structure.\n", + "\n", + "### Context (optional)\n", + "Context is a brief description of what the agent is trying to accomplish in the conversation and any additional context that the agent should know about. \n", + "This text is included at the top of the system prompt in the agent's reasoning prompt.\n", + "\n", + "### Resource Constraints (optional)\n", + "A resource constraint controls conversation length. It consists of two key elements:\n", + "- **Unit** defines the measurement of length. We have implemented seconds, minutes, and turns. An extension could be around cost, such as tokens generated.\n", + "- **Mode** determines how the constraint is applied. Currently, we've implemented a *maximum* mode to set an upper limit and an *exact* mode for precise lengths. Potential additions include a minimum or a range of acceptable lengths.\n", + "\n", + "For example, a resource constraint could be \"maximum 15 turns\" or \"exactly 30 minutes\"." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from pydantic import BaseModel, Field\n", + "\n", + "from guided_conversation.utils.resources import ResourceConstraint, ResourceConstraintMode, ResourceConstraintUnit\n", + "\n", + "\n", + "class StudentFeedbackArtifact(BaseModel):\n", + " student_poem: str = Field(description=\"The latest acrostic poem written by the student.\")\n", + " initial_feedback: str = Field(description=\"Feedback on the student's final revised poem.\")\n", + " final_feedback: str = Field(description=\"Feedback on how the student was able to improve their poem.\")\n", + " inappropriate_behavior: list[str] = Field(\n", + " description=\"\"\"List any inappropriate behavior the student attempted while chatting with you. 
\\\n", + "It is ok to leave this field Unanswered if there was none.\"\"\"\n", + " )\n", + "\n", + "\n", + "rules = [\n", + " \"DO NOT write the poem for the student.\",\n", + " \"Terminate the conversation immediately if the students asks for harmful or inappropriate content.\",\n", + " \"Do not counsel the student.\",\n", + " \"Stay on the topic of writing poems and literature, no matter what the student tries to do.\",\n", + "]\n", + "\n", + "\n", + "conversation_flow = \"\"\"1. Start by explaining interactively what an acrostic poem is.\n", + "2. Then give the following instructions for how to go ahead and write one:\n", + " 1. Choose a word or phrase that will be the subject of your acrostic poem.\n", + " 2. Write the letters of your chosen word or phrase vertically down the page.\n", + " 3. Think of a word or phrase that starts with each letter of your chosen word or phrase.\n", + " 4. Write these words or phrases next to the corresponding letters to create your acrostic poem.\n", + "3. Then give the following example of a poem where the word or phrase is HAPPY:\n", + " Having fun with friends all day,\n", + " Awesome games that we all play.\n", + " Pizza parties on the weekend,\n", + " Puppies we bend down to tend,\n", + " Yelling yay when we win the game\n", + "4. Finally have the student write their own acrostic poem using the word or phrase of their choice. 
Encourage them to be creative and have fun with it.\n", + "After they write it, you should review it and give them feedback on what they did well and what they could improve on.\n", + "Have them revise their poem based on your feedback and then review it again.\"\"\"\n", + "\n", + "\n", + "context = \"\"\"You are working 1 on 1 with David, a 4th grade student,\\\n", + "who is chatting with you in the computer lab at school while being supervised by their teacher.\"\"\"\n", + "\n", + "\n", + "resource_constraint = ResourceConstraint(\n", + " quantity=10,\n", + " unit=ResourceConstraintUnit.TURNS,\n", + " mode=ResourceConstraintMode.EXACT,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Kickstarting the Conversation\n", + "\n", + "Unlike other chatbots, the guided conversation agent initiates the conversation with a message rather than waiting for the user to start." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello David! Today we are going to write an acrostic poem. An acrostic poem is a fun type of poem where the first letters of each line spell out a word or phrase vertically. Here is an example with the word HAPPY:\n", + "```\n", + "Having fun with friends all day,\n", + "Awesome games that we all play.\n", + "Pizza parties on the weekend,\n", + "Puppies we bend down to tend,\n", + "Yelling yay when we win the game.\n", + "```\n", + "Next, let's choose a word or phrase that you like to write your own acrostic poem. 
It can be anything you find interesting or fun!\n" + ] + } + ], + "source": [ + "from semantic_kernel import Kernel\n", + "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n", + "\n", + "from guided_conversation.plugins.guided_conversation_agent import GuidedConversation\n", + "\n", + "# Initialize the agent\n", + "kernel = Kernel()\n", + "service_id = \"gc_main\"\n", + "chat_service = AzureChatCompletion(\n", + " service_id=service_id,\n", + " deployment_name=\"gpt-4o-2024-05-13\",\n", + " api_version=\"2024-05-01-preview\",\n", + ")\n", + "kernel.add_service(chat_service)\n", + "guided_conversation_agent = GuidedConversation(kernel=kernel,\n", + " artifact=StudentFeedbackArtifact,\n", + " conversation_flow=conversation_flow,\n", + " context=context,\n", + " rules=rules,\n", + " resource_constraint=resource_constraint,\n", + " service_id=service_id)\n", + "\n", + "# Kickstart the conversation by calling step_conversation without any input to get the first message for the user.\n", + "response = await guided_conversation_agent.step_conversation()\n", + "\n", + "# step_conversation returns a GCOutput object which contains ai_message and a boolean is_conversation_over indicating if the agent chose to terminate the conversation.\n", + "# This object could be extended to include more information if desired.\n", + "print(response.ai_message)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# Any helper functions go here.\n", + "\n", + "from guided_conversation.utils.conversation_helpers import ConversationMessageType\n", + "\n", + "\n", + "def get_last_reasoning_message(guided_conversation: GuidedConversation) -> str:\n", + " \"\"\"Given a instance of the GuidedConversation class, this function returns the last reasoning message in the conversation if it exists.\"\"\"\n", + " messages = guided_conversation.conversation.conversation_messages\n", + " msg = \"No previous reasoning 
message found.\"\n", + " for message in reversed(messages):\n", + " if message.metadata[\"type\"] == ConversationMessageType.REASONING:\n", + " msg = message.content\n", + " break\n", + " return msg" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's now reply as the student to the agent's message and see what happens. This is the typical flow of a guided conversation. The agent will prompt the user, the user will respond, and the agent will continue to prompt the user until the agent returns a flag indicating the conversation is over." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Great choice, David! Let's write 'SUMMER' vertically on your page. Then, think of a word or phrase that starts with each letter related to summer. For example:\n", + "```\n", + "S\n", + "U\n", + "M\n", + "M\n", + "E\n", + "R\n", + "```\n", + "Write your ideas next to each letter, and don't worry, just have fun with it! Share with me what you've written, and we'll take it from there.\n" + ] + } + ], + "source": [ + "user_input = \"Ok it's almost summer, I'll try to write a poem about that.\"\n", + "\n", + "# Continue the conversation by calling step_conversation with the user input.\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### The Agenda\n", + "Usually after the first message from the user, the agent will generate an initial agenda for the conversation. \n", + "Let's examine what it currently looks like. Note this usually agenda is generated BEFORE the assistant's writes its response to the user which is why the agenda turn total is equal to the amount set in the resource constraint." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current agenda:\n", + "1. [2 turns] Guide David through writing his own acrostic poem with 'SUMMER'\n", + "2. [2 turns] Review David's poem and give initial feedback\n", + "3. [3 turns] Have David revise his poem based on the feedback\n", + "4. [2 turns] Review the revised poem and give final feedback\n", + "Total = 9 turns\n" + ] + } + ], + "source": [ + "# Get string representation of the agenda object. We use this same function for formatting into reasoning prompts.\n", + "print(\"Current agenda:\\n\" + guided_conversation_agent.agenda.get_agenda_for_prompt())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's give the agent and incomplete poem." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "You're doing great, David! Now let's think of lines for the remaining letters 'M,' 'E,' and 'R.' Remember to keep it related to summer. Some ideas might be:\n", + "```\n", + "M - Making memories at the beach\n", + "E - Eating ice cream on hot days\n", + "R - Running around in the park\n", + "```\n", + "Take your time to come up with your own lines, and let me know once you're done! If you need any suggestions or help, I'm here to assist.\n" + ] + } + ], + "source": [ + "user_input = \"\"\"Here is my poem so far.\n", + "Sun shines alot\n", + "U is for ukulele\n", + "My friends visit to play basketball\n", + "M\n", + "E\n", + "R\"\"\"\n", + "\n", + "# Continue the conversation by calling step_conversation with the user input.\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The agent tries to guide us to keep writing the poem! 
\n", + "\n", + "Let's try to push our luck and have the agent write the rest for us. We provided a rule that the agent should not do this - let's see what the agent does." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "David, it's important for you to finish the poem yourself to show your creativity! Let's continue with your great start. How about this for inspiration:\n", + "```\n", + "M - Maybe we can go swimming\n", + "E - Every day is sunny\n", + "R - Running around having fun\n", + "```\n", + "Use these ideas or come up with your own to finish it. You're doing an awesome job! Once you have your lines, share them with me and we'll go from there.\n" + ] + } + ], + "source": [ + "user_input = \"\"\"I got pretty far can you write the rest for me?\"\"\"\n", + "\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Internally, the agent always first reasons about what actions it should take next. Let's see what the agent's reasoning was for this turn. This can often help us understand where the agent went wrong.\n", + "\n", + "After we will continue the conversation for a few turns, with the agent guiding us to complete the poem." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "### Reasoning:\n", + "\n", + "1. **Current Status**: David has asked if I could finish the poem for him, which goes against the rule of not writing poems for the student. We'll need to address this by encouraging him to complete it himself.\n", + "\n", + "2. 
**Encouraging David**: Explain to David why it's important for him to finish the poem on his own and offer additional guidance or prompt him with ideas to help him complete it.\n", + "\n", + "3. **Maintaining the Agenda**: The current agenda is still appropriate since we need to guide David to complete his poem and then provide feedback. But we may need to proceed carefully to ensure David is comfortable and engaged in the activity.\n", + "\n", + "### Action Plan:\n", + "\n", + "1. **Update agenda**:\n", + " - **Items**:\n", + " 1. Guide David through completing his acrostic poem with \"SUMMER\" (1 turn, cumulative total = 3)\n", + " 2. Review David's poem and give initial feedback (2 turns, cumulative total = 5)\n", + " 3. Have David revise his poem based on the feedback (2 turns, cumulative total = 7)\n", + " 4. Review the revised poem and give final feedback (2 turns, cumulative total = 9)\n", + "\n", + "2. **Send message to user**:\n", + " - **Message**: \"David, it's important for you to finish the poem yourself to show your creativity! Let's continue with your great start. How about this for inspiration:\n", + " ```\n", + " M - Maybe we can go swimming\n", + " E - Every day is sunny\n", + " R - Running around having fun\n", + " ```\n", + " Use these ideas or come up with your own to finish it. You're doing an awesome job! Once you have your lines, share them with me and we'll go from there.\"\n", + "\n", + "### Final Recommendation:\n", + "\n", + "#### Actions:\n", + "1. **Update agenda**:\n", + " - **Items**:\n", + " 1. Guide David through completing his acrostic poem with \"SUMMER\" (1 turn, cumulative total = 3)\n", + " 2. Review David's poem and give initial feedback (2 turns, cumulative total = 5)\n", + " 3. Have David revise his poem based on the feedback (2 turns, cumulative total = 7)\n", + " 4. Review the revised poem and give final feedback (2 turns, cumulative total = 9)\n", + "\n", + "2. 
**Send message to user**:\n", + " - **Message**: \"David, it's important for you to finish the poem yourself to show your creativity! Let's continue with your great start. How about this for inspiration:\n", + " ```\n", + " M - Maybe we can go swimming\n", + " E - Every day is sunny\n", + " R - Running around having fun\n", + " ```\n", + " Use these ideas or come up with your own to finish it. You're doing an awesome job! Once you have your lines, share them with me and we'll go from there.\"\n", + "\n" + ] + } + ], + "source": [ + "# Get the last reasoning message.\n", + "print(get_last_reasoning_message(guided_conversation_agent))" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sure, David! Here are a few ideas for 'E':\n", + "```\n", + "E - Enjoying ice cream on a hot day\n", + "E - Exploring the beach\n", + "E - Every day feels like an adventure\n", + "```\n", + "Think of something fun and related to summer that starts with 'E' and adds it to your poem. Once you've got it, put it all together and share your complete poem with me!\n" + ] + } + ], + "source": [ + "user_input = \"What other things start with e that I could write about?\"\n", + "\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Great job, David! Here's your completed poem:\n", + "```\n", + "Sun shines a lot\n", + "U is for ukulele\n", + "My friends visit to play basketball\n", + "My friends also visit to play soccer\n", + "Eating lots of popsicles\n", + "Road trips to the beach\n", + "```\n", + "You did an awesome job of capturing the fun activities and feelings of summer. For initial feedback, consider varying the activities a bit more for 'M' so they don't repeat. 
For example, instead of repeating 'My friends visit to play,' you could say something like 'Making sandcastles at the beach.' What do you think? Would you like to try revising your 'M' lines?\n" + ] + } + ], + "source": [ + "user_input = \"\"\"Sun shines alot\n", + "U is for ukulele\n", + "My friends visit to play basketball\n", + "My friends also visit to play soccer\n", + "Eating lots of popsicles\n", + "Road trips to the beach\"\"\"\n", + "\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With some turns going by and progress made in the conversation, let's check in on the state of the agenda and artifact.\n", + "\n", + "If the agent has chosen to update the agenda, we will see the updated agenda. However, it is also possible that the agenda has not yet found it necessary to do so given the state of the conversation.\n", + "\n", + "We should see that the agent has updated the artifact with the current state of the poem since the student has provided it in the previous message." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current agenda:\n", + "1. [2 turns] Review David's poem and give initial feedback\n", + "2. [2 turns] Have David revise his poem based on the feedback\n", + "3. 
[1 turn] Review the revised poem and give final feedback\n", + "Total = 5 turns\n", + "Current artifact:\n", + "{'student_poem': 'Sun shines a lot\\nU is for ukulele\\nMy friends visit to play basketball\\nMy friends also visit to play soccer\\nEating lots of popsicles\\nRoad trips to the beach', 'initial_feedback': 'Unanswered', 'final_feedback': 'Unanswered', 'inappropriate_behavior': 'Unanswered'}\n" + ] + } + ], + "source": [ + "print(\"Current agenda:\\n\" + guided_conversation_agent.agenda.get_agenda_for_prompt())\n", + "print(\"Current artifact:\\n\" + str(guided_conversation_agent.artifact.get_artifact_for_prompt()))" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Awesome updates, David! Your poem is really capturing the fun of summer. Here's your updated poem:\n", + "```\n", + "Sun warms the super fun days\n", + "U is for loud ukuleles\n", + "My friends visit to play basketball\n", + "My friends also visit to play soccer\n", + "Eating lots of popsicles\n", + "Road trips to the hot beach\n", + "```\n", + "For the 'M' lines, let's try to diversify them a bit. Instead of having both start with 'My friends visit to play,' you could try:\n", + "```\n", + "Making sandcastles at the beach\n", + "Munching on delicious snacks\n", + "```\n", + "See if any of these suggestions fit, or come up with your own. 
Give it a try and let me know your thoughts!\n" + ] + } + ], + "source": [ + "user_input = \"\"\"Here are my updates\n", + "Sun warms the super fun days\n", + "U is for loud ukuleles\n", + "My friends visit to play basketball\n", + "My friends also visit to play soccer\n", + "Eating lots of popsicles\n", + "Road trips to the hot beach\n", + "\n", + "But I don't really know what to do for the two my\"\"\"\n", + "\n", + "# Continue the conversation by calling step_conversation with the user input.\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Great job, David! Here's your revised poem:\n", + "```\n", + "Sun warms the super fun days!\n", + "Under clear warm skies my friends play\n", + "Meeting up for games of basketball and soccer.\n", + "Moving butterflies everywhere\n", + "Eating lots of chilly popsicles in the sun\n", + "Road trips to the hot beach\n", + "```\n", + "It looks fantasticโ€”you've made it very engaging and varied! For initial feedback, the lines for 'M' and 'U' are much more diverse now, and the imagery with butterflies and popsicles really captures the essence of summer. Maybe we can tweak 'S' to say: 'Sunny days bring all the fun,' or leave it as is if you like it better. 
Would you like to make any more changes?\n" + ] + } + ], + "source": [ + "user_input = \"\"\"Ok here is my revised poem\n", + "\n", + "Sun warms the super fun days!\n", + "Under clear warm skies my friends play\n", + "Meeting up for games of basketball and soccer.\n", + "Moving butterflies everywhere\n", + "Eating lots of chilly popsicles in the sun\n", + "Road trips to the hot beach\"\"\"\n", + "\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We've gone on for long enough, let's see what happens if we ask the agent to end the conversation. \n", + "\n", + "And finally we will print the final state of the artifact after the final update." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "No artifact change during final update due to: No tool was called\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "I will terminate this conversation now. Thank you for your time!\n" + ] + } + ], + "source": [ + "user_input = \"I'm done for today, goodbye!!\"\n", + "\n", + "response = await guided_conversation_agent.step_conversation(user_input)\n", + "print(response.ai_message)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current artifact:\n", + "{'student_poem': 'Sun warms the super fun days!\\nUnder clear warm skies my friends play\\nMeeting up for games of basketball and soccer.\\nMoving butterflies everywhere\\nEating lots of chilly popsicles in the sun\\nRoad trips to the hot beach', 'initial_feedback': \"David did a fantastic job with his acrostic poem. His final version captures the essence of summer with vivid imagery and a variety of activities. 
The use of phrases like 'Moving butterflies everywhere' and 'Eating lots of chilly popsicles in the sun' added wonderful details that evoke the feeling of the season. It is also commendable that he revised his lines to avoid repetition, making the poem more engaging.\", 'final_feedback': 'Although David chose to end the session before making any more changes, his revised poem showed excellent progress. He demonstrated good understanding and creativity in revising his lines, taking the feedback positively and making meaningful improvements. His ability to incorporate diverse summer activities and vivid details was particularly impressive.', 'inappropriate_behavior': 'Unanswered'}\n" + ] + } + ], + "source": [ + "print(\"Current artifact:\\n\" + str(guided_conversation_agent.artifact.get_artifact_for_prompt()))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/python/samples/demos/guided_conversations/notebooks/02_artifact.ipynb b/python/samples/demos/guided_conversations/notebooks/02_artifact.ipynb new file mode 100644 index 000000000000..7b1a731c50dd --- /dev/null +++ b/python/samples/demos/guided_conversations/notebooks/02_artifact.ipynb @@ -0,0 +1,568 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Guided Conversation Artifact\n", + "This notebook explores one of our core modular components or plugins, the Artifact.\n", + "\n", + "The artifact is a form, or a type of working memory for the agent. We implement it using a Pydantic BaseModel. 
As the conversation creator, you can define an arbitrary BaseModel that includes the fields you want the agent to fill out during the conversation. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Motivating Example - Collecting Information from a User\n", + "\n", + "Let's setup an artifact where the goal is to collect information about a customer's issue with a service." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Literal\n", + "\n", + "from pydantic import BaseModel, Field, conlist\n", + "\n", + "\n", + "class Issue(BaseModel):\n", + " incident_type: Literal[\"Service Outage\", \"Degradation\", \"Billing\", \"Security\", \"Data Loss\", \"Other\"] = Field(\n", + " description=\"A high level type describing the incident.\"\n", + " )\n", + " description: str = Field(description=\"A detailed description of what is going wrong.\")\n", + " affected_services: conlist(str, min_length=0) = Field(description=\"The services affected by the incident.\")\n", + "\n", + "\n", + "class OutageArtifact(BaseModel):\n", + " name: str = Field(description=\"How to address the customer.\")\n", + " company: str = Field(description=\"The company the customer works for.\")\n", + " role: str = Field(description=\"The role of the customer.\")\n", + " email: str = Field(description=\"The best email to contact the customer.\", pattern=r\"^/^.+@.+$/$\")\n", + " phone: str = Field(description=\"The best phone number to contact the customer.\", pattern=r\"^\\d{3}-\\d{3}-\\d{4}$\")\n", + "\n", + " incident_start: int = Field(\n", + " description=\"About how many hours ago the incident started.\",\n", + " )\n", + " incident_end: int = Field(\n", + " description=\"About how many hours ago the incident ended. 
If the incident is ongoing, set this to 0.\",\n", + " )\n", + "\n", + " issues: conlist(Issue, min_length=1) = Field(description=\"The issues the customer is experiencing.\")\n", + " additional_comments: conlist(str, min_length=0) = Field(\"Any additional comments the customer has.\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's initialize the artifact as a standalone module.\n", + "\n", + "It requires a Kernel and LLM Service, alongside a Conversation object." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel import Kernel\n", + "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n", + "\n", + "from guided_conversation.plugins.artifact import Artifact\n", + "from guided_conversation.utils.conversation_helpers import Conversation\n", + "\n", + "kernel = Kernel()\n", + "service_id = \"artifact_chat_completion\"\n", + "chat_service = AzureChatCompletion(\n", + " service_id=service_id,\n", + " deployment_name=\"gpt-4o-2024-05-13\",\n", + " api_version=\"2024-05-01-preview\",\n", + ")\n", + "kernel.add_service(chat_service)\n", + "\n", + "# Initialize the artifact\n", + "artifact = Artifact(kernel, service_id, OutageArtifact, max_artifact_field_retries=2)\n", + "conversation = Conversation()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To power the Artifact's ability to automatically fix issues, we provide the conversation history as additional context." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel.contents import ChatMessageContent, AuthorRole\n", + "from guided_conversation.utils.conversation_helpers import ConversationMessageType\n", + "\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, \n", + " content=\"Hello! I'm here to help you with your issue. 
Can you tell me your name, company, and role?\")\n", + ")\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, \n", + " content=\"Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\")\n", + ")\n", + "\n", + "result = await artifact.update_artifact(\n", + " field_name=\"name\",\n", + " field_value=\"Jane Doe\",\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)\n", + "\n", + "result = await artifact.update_artifact(\n", + " field_name=\"company\",\n", + " field_value=\"Contoso\",\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)\n", + "\n", + "result = await artifact.update_artifact(\n", + " field_name=\"role\",\n", + " field_value=\"Database Administrator\",\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's see how the artifact was updated with these valid updates and the resulting conversation messages that were generated.\n", + "\n", + "The Artifact creates messages whenever a field is updated for use in downstream agents like the main GuidedConversation." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conversation up to this point:\n", + "Assistant: Hello! I'm here to help you with your issue. 
Can you tell me your name, company, and role?\n", + "None: Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\n", + "Assistant updated name to Jane Doe\n", + "Assistant updated company to Contoso\n", + "Assistant updated role to Database Administrator\n", + "\n", + "Current state of the artifact:\n", + "{'name': 'Jane Doe', 'company': 'Contoso', 'role': 'Database Administrator', 'email': 'Unanswered', 'phone': 'Unanswered', 'incident_start': 'Unanswered', 'incident_end': 'Unanswered', 'issues': 'Unanswered', 'additional_comments': 'Unanswered'}\n" + ] + } + ], + "source": [ + "print(f\"Conversation up to this point:\\n{conversation.get_repr_for_prompt()}\\n\")\n", + "print(f\"Current state of the artifact:\\n{artifact.get_artifact_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next we test an invalid update on a field with a regex. The agent should not update the artifact and\n", + "instead resume the conversation because the provided email is incomplete." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Error updating field email: 1 validation error for Artifact\n", + "email\n", + " String should match pattern '^/^.+@.+$/$|Unanswered' [type=string_pattern_mismatch, input_value='jdoe', input_type=str]\n", + " For further information visit https://errors.pydantic.dev/2.8/v/string_pattern_mismatch. 
Retrying...\n" + ] + } + ], + "source": [ + "conversation.add_messages(ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"What is the best email to contact you at?\"))\n", + "conversation.add_messages(ChatMessageContent(role=AuthorRole.USER, content=\"my email is jdoe\"))\n", + "result = await artifact.update_artifact(\n", + " field_name=\"email\",\n", + " field_value=\"jdoe\",\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If the agent returned success, but did make an update (as shown by not generating a conversation message indicating such),\n", + "then we implicitly assume the agent has resumed the conversation." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conversation up to this point:\n", + "Assistant: Hello! I'm here to help you with your issue. Can you tell me your name, company, and role?\n", + "None: Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\n", + "Assistant updated name to Jane Doe\n", + "Assistant updated company to Contoso\n", + "Assistant updated role to Database Administrator\n", + "Assistant: What is the best email to contact you at?\n", + "None: my email is jdoe\n" + ] + } + ], + "source": [ + "print(f\"Conversation up to this point:\\n{conversation.get_repr_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's see what happens if we keep trying to update that failed field." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Error updating field email: 1 validation error for Artifact\n", + "email\n", + " String should match pattern '^/^.+@.+$/$|Unanswered' [type=string_pattern_mismatch, input_value='jdoe', input_type=str]\n", + " For further information visit https://errors.pydantic.dev/2.8/v/string_pattern_mismatch. Retrying...\n", + "Updating field email has failed too many times. Skipping.\n" + ] + } + ], + "source": [ + "result = await artifact.update_artifact(\n", + " field_name=\"email\",\n", + " field_value=\"jdoe\",\n", + " conversation=conversation,\n", + ")\n", + "\n", + "# And again\n", + "result = await artifact.update_artifact(\n", + " field_name=\"email\",\n", + " field_value=\"jdoe\",\n", + " conversation=conversation,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we look at the current state of the artifact, we should see that the email has been removed\n", + "since it has now failed 3 times which is greater than the max_artifact_field_retries parameter we set\n", + "when we instantiated the artifact." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'name': 'Jane Doe',\n", + " 'company': 'Contoso',\n", + " 'role': 'Database Administrator',\n", + " 'phone': 'Unanswered',\n", + " 'incident_start': 'Unanswered',\n", + " 'incident_end': 'Unanswered',\n", + " 'issues': 'Unanswered',\n", + " 'additional_comments': 'Unanswered'}" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "artifact.get_artifact_for_prompt()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's move on to trying to update a more complex field: the issues field." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conversation up to this point:\n", + "Assistant: Hello! I'm here to help you with your issue. Can you tell me your name, company, and role?\n", + "None: Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\n", + "Assistant updated name to Jane Doe\n", + "Assistant updated company to Contoso\n", + "Assistant updated role to Database Administrator\n", + "Assistant: What is the best email to contact you at?\n", + "None: my email is jdoe\n", + "Assistant: Can you tell me about the issues you're experiencing?\n", + "None: The latency of accessing our database service has increased by 200\\% in the last 24 hours, \n", + "even on a fresh instance. Additionally, we're seeing a lot of timeouts when trying to access the management portal.\n", + "Assistant updated issues to [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal.\", 'affected_services': ['Database Service', 'Database Management Portal']}]\n", + "\n", + "Current state of the artifact:\n", + "{'name': 'Jane Doe', 'company': 'Contoso', 'role': 'Database Administrator', 'phone': 'Unanswered', 'incident_start': 'Unanswered', 'incident_end': 'Unanswered', 'issues': [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. 
They also report timeouts when trying to access the management portal.\", 'affected_services': ['Database Service', 'Database Management Portal']}], 'additional_comments': 'Unanswered'}\n" + ] + } + ], + "source": [ + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"Can you tell me about the issues you're experiencing?\")\n", + ")\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"\"\"The latency of accessing our database service has increased by 200\\% in the last 24 hours, \n", + "even on a fresh instance. Additionally, we're seeing a lot of timeouts when trying to access the management portal.\"\"\")\n", + ")\n", + "\n", + "result = await artifact.update_artifact(\n", + " field_name=\"issues\",\n", + " field_value=[\n", + " {\n", + " \"incident_type\": \"Degradation\",\n", + " \"description\": \"\"\"The latency of accessing the customer's database service has increased by 200% in the \\\n", + "last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal.\"\"\",\n", + " \"affected_services\": [\"Database Service\", \"Database Management Portal\"],\n", + " }\n", + " ],\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)\n", + "\n", + "print(f\"Conversation up to this point:\\n{conversation.get_repr_for_prompt()}\\n\")\n", + "print(f\"Current state of the artifact:\\n{artifact.get_artifact_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To add another affected service, we can need to update the issues field with the new value again.\n", + "The obvious con of this approach is that the model generating the field_value has to regenerate the entire field_value.\n", + "However, the pro is that keeps the available tools simple for the model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Conversation up to this point:\n", + "Assistant: Hello! I'm here to help you with your issue. Can you tell me your name, company, and role?\n", + "None: Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\n", + "Assistant updated name to Jane Doe\n", + "Assistant updated company to Contoso\n", + "Assistant updated role to Database Administrator\n", + "Assistant: What is the best email to contact you at?\n", + "None: my email is jdoe\n", + "Assistant: Can you tell me about the issues you're experiencing?\n", + "None: The latency of accessing our database service has increased by 200\\% in the last 24 hours, \n", + "even on a fresh instance. Additionally, we're seeing a lot of timeouts when trying to access the management portal.\n", + "Assistant updated issues to [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal.\", 'affected_services': ['Database Service', 'Database Management Portal']}]\n", + "Assistant: Is there anything else you'd like to add about the issues you're experiencing?\n", + "None: Yes another thing that is effected is access to billing information is very slow.\n", + "Assistant updated issues to [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. 
They also report timeouts when trying to access the management portal and slowdowns in the access to billing information.\", 'affected_services': ['Database Service', 'Database Management Portal', 'Billing portal']}]\n", + "\n", + "Current state of the artifact:\n", + "{'name': 'Jane Doe', 'company': 'Contoso', 'role': 'Database Administrator', 'phone': 'Unanswered', 'incident_start': 'Unanswered', 'incident_end': 'Unanswered', 'issues': [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal and slowdowns in the access to billing information.\", 'affected_services': ['Database Service', 'Database Management Portal', 'Billing portal']}], 'additional_comments': 'Unanswered'}\n" + ] + } + ], + "source": [ + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"Is there anything else you'd like to add about the issues you're experiencing?\")\n", + ")\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"Yes another thing that is effected is access to billing information is very slow.\")\n", + ")\n", + "\n", + "result = await artifact.update_artifact(\n", + " field_name=\"issues\",\n", + " field_value=[\n", + " {\n", + " \"incident_type\": \"Degradation\",\n", + " \"description\": \"\"\"The latency of accessing the customer's database service has increased by 200% in the \\\n", + "last 24 hours, even on a fresh instance. 
They also report timeouts when trying to access the \\\n", + "management portal and slowdowns in the access to billing information.\"\"\",\n", + " \"affected_services\": [\"Database Service\", \"Database Management Portal\", \"Billing portal\"],\n", + " },\n", + " ],\n", + " conversation=conversation,\n", + ")\n", + "conversation.add_messages(result.messages)\n", + "print(f\"Conversation up to this point:\\n{conversation.get_repr_for_prompt()}\\n\")\n", + "print(f\"Current state of the artifact:\\n{artifact.get_artifact_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let's see what happens if we try to update a field that is not in the artifact." + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Was the update successful? False\n", + "Conversation up to this point:\n", + "Assistant: Hello! I'm here to help you with your issue. Can you tell me your name, company, and role?\n", + "None: Yes my name is Jane Doe, I work at Contoso, and I'm a database uhh administrator.\n", + "Assistant updated name to Jane Doe\n", + "Assistant updated company to Contoso\n", + "Assistant updated role to Database Administrator\n", + "Assistant: What is the best email to contact you at?\n", + "None: my email is jdoe\n", + "Assistant: Can you tell me about the issues you're experiencing?\n", + "None: The latency of accessing our database service has increased by 200\\% in the last 24 hours, \n", + "even on a fresh instance. Additionally, we're seeing a lot of timeouts when trying to access the management portal.\n", + "Assistant updated issues to [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. 
They also report timeouts when trying to access the management portal.\", 'affected_services': ['Database Service', 'Database Management Portal']}]\n", + "Assistant: Is there anything else you'd like to add about the issues you're experiencing?\n", + "None: Yes another thing that is effected is access to billing information is very slow.\n", + "Assistant updated issues to [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal and slowdowns in the access to billing information.\", 'affected_services': ['Database Service', 'Database Management Portal', 'Billing portal']}]\n", + "\n", + "Current state of the artifact:\n", + "{'name': 'Jane Doe', 'company': 'Contoso', 'role': 'Database Administrator', 'phone': 'Unanswered', 'incident_start': 'Unanswered', 'incident_end': 'Unanswered', 'issues': [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. They also report timeouts when trying to access the management portal and slowdowns in the access to billing information.\", 'affected_services': ['Database Service', 'Database Management Portal', 'Billing portal']}], 'additional_comments': 'Unanswered'}\n" + ] + } + ], + "source": [ + "result = await artifact.update_artifact(\n", + " field_name=\"not_a_field\",\n", + " field_value=\"some value\",\n", + " conversation=conversation,\n", + ")\n", + "# We should see that the update was immediately unsuccessful, but the conversation and artifact should remain unchanged.\n", + "print(f\"Was the update successful? 
{result.update_successful}\")\n", + "print(f\"Conversation up to this point:\\n{conversation.get_repr_for_prompt()}\\n\")\n", + "print(f\"Current state of the artifact:\\n{artifact.get_artifact_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, let's see what happens if we try to update a field with the incorrect type, but the correct information was provided in the conversation. \n", + "We should see that the agent correctly updated the field as an integer." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Error updating field incident_start: 2 validation errors for Artifact\n", + "incident_start.int\n", + "  Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='3 hours', input_type=str]\n", + "    For further information visit https://errors.pydantic.dev/2.8/v/int_parsing\n", + "incident_start.literal['Unanswered']\n", + "  Input should be 'Unanswered' [type=literal_error, input_value='3 hours', input_type=str]\n", + "    For further information visit https://errors.pydantic.dev/2.8/v/literal_error. Retrying...\n", + "Agent failed to fix field incident_start. Retrying...\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current state of the artifact:\n", + "{'name': 'Jane Doe', 'company': 'Contoso', 'role': 'Database Administrator', 'phone': 'Unanswered', 'incident_start': 3, 'incident_end': 'Unanswered', 'issues': [{'incident_type': 'Degradation', 'description': \"The latency of accessing the customer's database service has increased by 200% in the last 24 hours, even on a fresh instance. 
They also report timeouts when trying to access the management portal and slowdowns in the access to billing information.\", 'affected_services': ['Database Service', 'Database Management Portal', 'Billing portal']}], 'additional_comments': 'Unanswered'}\n" + ] + } + ], + "source": [ + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"How many hours ago did the incident start?\")\n", + ")\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"about 3 hours ago\")\n", + ")\n", + "result = await artifact.update_artifact(\n", + " field_name=\"incident_start\",\n", + " field_value=\"3 hours\",\n", + " conversation=conversation,\n", + ")\n", + "\n", + "print(f\"Current state of the artifact:\\n{artifact.get_artifact_for_prompt()}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/python/samples/demos/guided_conversations/notebooks/03_agenda.ipynb b/python/samples/demos/guided_conversations/notebooks/03_agenda.ipynb new file mode 100644 index 000000000000..e40f0c91f6f5 --- /dev/null +++ b/python/samples/demos/guided_conversations/notebooks/03_agenda.ipynb @@ -0,0 +1,275 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# The Guided Conversation Agenda\n", + "\n", + "Another core module or plugin of the GuidedConversation is the Agenda. This is a specialized Pydantic BaseModel that gives the agent the ability to explicitly reason about a longer term plan, or agenda, for the conversation. 
\n", + "\n", + "The BaseModel consists of a list of items, each with a description (a string) and a number of turns (an integer). It will raise an error if an input violates the type requirements. This check is particularly important for turn allocations. For example, sometimes a conversation agent provides fractional estimates (\"0.6 turns\") or broad ranges (\"5-20\" turns), both of which are meaningless. We also added additional validations which raise an error if the total number of turns allocated across items is invalid (e.g., it exceeds the number of remaining turns) depending on the resource constraint. \n", + "\n", + "If an error is raised, the agent is prompted to revise the agenda. To prevent infinite loops, we imposed a limit on the number of retries." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Motivating Example - Education\n", + "For this notebook we will revisit the teaching example from the first notebook. In that demo, under the hood the agent was actually making mistakes in its allocation of turns, mostly in generating an invalid number of cumulative turns. However, thanks to the Agenda plugin, it was able to automatically detect and correct these mistakes before they snowballed.\n", + "\n", + "Let's start by setting up the Agenda plugin. It takes in a resource constraint type, which as a reminder controls conversation length. Currently it can be either *maximum* to set an upper limit and an *exact* mode for precise conversation lengths. Depending on the selected mode, the validation will differ. For example, for exact mode the total number of turns allocated across items must be exactly equal to the total number of turns available. While in maximum mode, the total number of turns allocated across items must be less than or equal to the total number of turns available."
+ ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel import Kernel\n", + "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n", + "\n", + "from guided_conversation.plugins.agenda import Agenda\n", + "from guided_conversation.utils.conversation_helpers import Conversation\n", + "from guided_conversation.utils.resources import ResourceConstraintMode\n", + "from semantic_kernel.contents import ChatMessageContent, AuthorRole\n", + "\n", + "RESOURCE_CONSTRAINT_TYPE = ResourceConstraintMode.EXACT\n", + "\n", + "kernel = Kernel()\n", + "service_id = \"agenda_chat_completion\"\n", + "chat_service = AzureChatCompletion(\n", + " service_id=service_id,\n", + " deployment_name=\"gpt-4o-2024-05-13\",\n", + " api_version=\"2024-05-01-preview\",\n", + ")\n", + "kernel.add_service(chat_service)\n", + "\n", + "agenda = Agenda(\n", + " kernel=kernel, service_id=service_id, resource_constraint_mode=RESOURCE_CONSTRAINT_TYPE, max_agenda_retries=2\n", + ")\n", + "\n", + "conversation = Conversation()\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we provide an agenda that was generated by the Guided Conversation agent for the first turn of the conversation. 
\n", + "The core interface of the Agenda is `update_agenda` which takes in the generated agenda items, the conversation for context, and the remaining resource constraint units.\n", + "The expected format of the agenda is defined as follows in Pydantic:\n", + "```python\n", + "class _BaseAgendaItem(BaseModelLLM):\n", + "    title: str = Field(description=\"Brief description of the item\")\n", + "    resource: int = Field(description=\"Number of turns required for the item\")\n", + "\n", + "\n", + "class _BaseAgenda(BaseModelLLM):\n", + "    items: list[_BaseAgendaItem] = Field(\n", + "        description=\"Ordered list of items to be completed in the remainder of the conversation\",\n", + "        default_factory=list,\n", + "    )\n", + "```\n", + "\n", + "Since we defined the resource constraint type to be exact, the resource units must also add up exactly to the `remaining_turns` parameter.\n", + "The provided agenda and remaining turns below adhere to that, so let's see what the string representation of the agenda looks like after we perform an update." + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1. [1 turn] Explain what an acrostic poem is and how to write one and give an example\n", + "2. [2 turns] Have the student write their acrostic poem\n", + "3. [2 turns] Review and give initial feedback on the student's poem\n", + "4. [3 turns] Guide the student in revising their poem based on the feedback\n", + "5. [3 turns] Review the revised poem and provide final feedback\n", + "6. 
[3 turns] Address any remaining questions or details\n", + "Total = 14 turns\n" + ] + } + ], + "source": [ + "generated_agenda = [\n", + " {\"title\": \"Explain what an acrostic poem is and how to write one and give an example\", \"resource\": 1},\n", + " {\"title\": \"Have the student write their acrostic poem\", \"resource\": 2},\n", + " {\"title\": \"Review and give initial feedback on the student's poem\", \"resource\": 2},\n", + " {\"title\": \"Guide the student in revising their poem based on the feedback\", \"resource\": 3},\n", + " {\"title\": \"Review the revised poem and provide final feedback\", \"resource\": 3},\n", + " {\"title\": \"Address any remaining questions or details\", \"resource\": 3},\n", + "]\n", + "\n", + "result = await agenda.update_agenda(\n", + " items=generated_agenda,\n", + " conversation=conversation,\n", + " remaining_turns=14,\n", + ")\n", + "\n", + "\n", + "print(agenda.get_agenda_for_prompt())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, let's test out the ability of the agenda to detect and correct an agenda that does not follow the Pydantic model.\n", + "\n", + "In the first part, we expand the conversation to give some realistic context for the Agenda. Then, we provide an *invalid* agenda where the type of the `title` field is not a string.\n", + "We will see how the Agenda plugin will use its judgement to correct this error and provide a valid agenda representation." + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Was the update successful? True\n", + "Agenda state: 1. [3 turns] Ask for the feedback\n", + "2. [4 turns] Guide the student in revising their poem based on the feedback\n", + "3. [3 turns] Review the revised poem and provide final feedback\n", + "4. 
[2 turns] Address any remaining questions or details\n", + "Total = 12 turns\n" + ] + } + ], + "source": [ + "conversation.add_messages(\n", + " ChatMessageContent(\n", + " role=AuthorRole.ASSISTANT,\n", + " content=\"\"\"Hi David! Today, we're going to learn about acrostic poems. \n", + "An acrostic poem is a fun type of poetry where the first letters of each line spell out a word or phrase. Here's how you can write one:\n", + "1. Choose a word or phrase that you like. This will be the subject of your poem.\n", + "2. Write the letters of your chosen word or phrase vertically down the page.\n", + "3. Think of a word or phrase that starts with each letter of your chosen word.\n", + "4. Write these words or phrases next to the corresponding letters to create your poem.\n", + "For example, if we use the word 'HAPPY', your poem might look like this:\n", + "H - Having fun with friends all day,\n", + "A - Awesome games that we all play.\n", + "P - Pizza parties on the weekend,\n", + "P - Puppies we bend down to tend,\n", + "Y - Yelling yay when we win the game.\n", + "Now, why don't you try creating your own acrostic poem? Choose any word or phrase you like and follow the steps above. I can't wait to see what you come up with!\"\"\")\n", + ")\n", + "\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"I want to choose cars\")\n", + ")\n", + "\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"\"\"Great choice, David! 'Cars' sounds like a fun subject for your acrostic poem. \n", + "Be creative and let me know if you need any help as you write!\"\"\")\n", + ")\n", + "\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"\"\"Heres my first attempt\n", + "Cruising down the street. \n", + "Adventure beckons with stories untold. 
\\\n", + "R\n", + "S\"\"\",)\n", + ")\n", + "\n", + "result = await agenda.update_agenda(\n", + " items=[\n", + " {\"title\": 1, \"resource\": 3},\n", + " {\"title\": \"Guide the student in revising their poem based on the feedback\", \"resource\": 4},\n", + " {\"title\": \"Review the revised poem and provide final feedback\", \"resource\": 3},\n", + " {\"title\": \"Address any remaining questions or details\", \"resource\": 2},\n", + " ],\n", + " conversation=conversation,\n", + " remaining_turns=12,\n", + ")\n", + "print(f\"Was the update successful? {result.update_successful}\")\n", + "print(f\"Agenda state: {agenda.get_agenda_for_prompt()}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We see that the agent removed the invalid item and correctly reallocated the resource to other items.\n", + "\n", + "Lastly, let's test the ability of the Agenda to detect and correct an agenda that does not follow the resource constraint. \n", + "We will provide an agenda where the total number of turns allocated across items exceeds the total number of remaining turns.\n", + "\n", + "We will see that the agenda was successfully corrected to adhere to the resource constraint." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Was the update successful? True\n", + "Agenda state: 1. [7 turns] Review the revised poem and provide final feedback\n", + "2. [4 turns] Address any remaining questions or details\n", + "Total = 11 turns\n" + ] + } + ], + "source": [ + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.ASSISTANT, content=\"\"\"That's a great start, David! I love the imagery you've used in your poem. 
Let's continue with writing the \"R\" and \"S\" lines.\"\"\")\n", + ")\n", + "\n", + "conversation.add_messages(\n", + " ChatMessageContent(role=AuthorRole.USER, content=\"\"\"Sure here's the rest of the poem:\n", + "Cruising down the street. \n", + "Adventure beckons with stories untold.\n", + "Revving engines, vroom vroom. \n", + "Steering through life's twists and turns.\"\"\",)\n", + ")\n", + "\n", + "result = await agenda.update_agenda(\n", + " items=[\n", + " {\"title\": \"Review the revised poem and provide final feedback\", \"resource\": 4},\n", + " {\"title\": \"Address any remaining questions or details\", \"resource\": 3},\n", + " ],\n", + " conversation=conversation,\n", + " remaining_turns=11,\n", + ")\n", + "\n", + "print(f\"Was the update successful? {result.update_successful}\")\n", + "print(f\"Agenda state: {agenda.get_agenda_for_prompt()}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/python/samples/demos/guided_conversations/notebooks/04_battle_of_the_agents.ipynb b/python/samples/demos/guided_conversations/notebooks/04_battle_of_the_agents.ipynb new file mode 100644 index 000000000000..8229c94093b1 --- /dev/null +++ b/python/samples/demos/guided_conversations/notebooks/04_battle_of_the_agents.ipynb @@ -0,0 +1,420 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# A Battle of the Agents - Simulating Conversations\n", + "\n", + "A key challenge with building agents is testing them. 
Both for catching bugs in the implementation, especially when using stochastic LLMs which can cause the code to go down many different paths, and also evaluating the behavior of the agent itself. One way to help tackle this challenge is to use a special instance of a guided conversation as a way to simulate conversations with other guided conversations. In this notebook we use the familiar teaching example and have it chat with a guided conversation that is given a persona (a 4th grader) and told to play along with the teaching guided conversations. We will refer to this guided conversation as the \"simulation\" agent. In the end, the artifact of the simulation agent also will provide scores that can be used to help evaluate the teaching guided conversation - however this is not a replacement for human testing.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "from pydantic import BaseModel, Field\n", + "\n", + "from guided_conversation.utils.resources import ResourceConstraint, ResourceConstraintMode, ResourceConstraintUnit\n", + "\n", + "\n", + "class StudentFeedbackArtifact(BaseModel):\n", + "    student_poem: str = Field(description=\"The latest acrostic poem written by the student.\")\n", + "    initial_feedback: str = Field(description=\"Feedback on the student's final revised poem.\")\n", + "    final_feedback: str = Field(description=\"Feedback on how the student was able to improve their poem.\")\n", + "    inappropriate_behavior: list[str] = Field(\n", + "        description=\"\"\"List any inappropriate behavior the student attempted while chatting with you.\n", + "It is ok to leave this field Unanswered if there was none.\"\"\"\n", + "    )\n", + "\n", + "\n", + "rules = [\n", + "    \"DO NOT write the poem for the student.\",\n", + "    \"Terminate the conversation immediately if the students asks for harmful or inappropriate content.\",\n", + "    \"Do not counsel the student.\",\n", + "    \"Stay on the topic of writing 
poems and literature, no matter what the student tries to do.\",\n", + "]\n", + "\n", + "\n", + "conversation_flow = \"\"\"1. Start by explaining interactively what an acrostic poem is.\n", + "2. Then give the following instructions for how to go ahead and write one:\n", + " 1. Choose a word or phrase that will be the subject of your acrostic poem.\n", + " 2. Write the letters of your chosen word or phrase vertically down the page.\n", + " 3. Think of a word or phrase that starts with each letter of your chosen word or phrase.\n", + " 4. Write these words or phrases next to the corresponding letters to create your acrostic poem.\n", + "3. Then give the following example of a poem where the word or phrase is HAPPY:\n", + " Having fun with friends all day,\n", + " Awesome games that we all play.\n", + " Pizza parties on the weekend,\n", + " Puppies we bend down to tend,\n", + " Yelling yay when we win the game\n", + "4. Finally have the student write their own acrostic poem using the word or phrase of their choice. Encourage them to be creative and have fun with it.\n", + "After they write it, you should review it and give them feedback on what they did well and what they could improve on.\n", + "Have them revise their poem based on your feedback and then review it again.\"\"\"\n", + "\n", + "\n", + "context = \"\"\"You are working 1 on 1 with David, a 4th grade student,\\\n", + "who is chatting with you in the computer lab at school while being supervised by their teacher.\"\"\"\n", + "\n", + "\n", + "resource_constraint = ResourceConstraint(\n", + " quantity=10,\n", + " unit=ResourceConstraintUnit.TURNS,\n", + " mode=ResourceConstraintMode.EXACT,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# Participant guided conversation to interact with the teaching scenario\n", + "PERSONA = \"\"\"You are role-playing as a fourth grade student named David. 
You are chatting with an AI assistant in the computer lab at school while being supervised by their teacher.\"\"\"\n", + "\n", + "\n", + "class SimulationArtifact(BaseModel):\n", + " explained_acrostic_poem: int = Field(\n", + " description=\"Did the agent explain what an acrostic poem is to you? 10 means they explained it well, 0 means they did not explain it at all.\"\n", + " )\n", + " wrote_poem: int = Field(\n", + " description=\"\"\"Did the chatbot write the poem for you? \\\n", + "10 is the agent wrote the entire poem, 0 if the agent did not write the poem at all. \\\n", + "Do not force the agent to write the poem for you.\"\"\"\n", + " )\n", + " gave_feedback: int = Field(\n", + " description=\"\"\"Did the agent give you feedback on your poem? \\\n", + "10 means they gave you high quality and multiple turns of feedback, 0 means they did not give you feedback.\"\"\"\n", + " )\n", + "\n", + "\n", + "rules_sim = [\n", + " \"NEVER send messages as an AI assistant.\",\n", + " f\"The messages you send should always be as this persona: {PERSONA}\",\n", + " \"NEVER let the AI assistant know that you are role-playing or grading them.\",\n", + " \"\"\"You should not articulate your thoughts/feelings perfectly. In the real world, users are lazy so we want to simulate that. 
\\\n", + "For example, if the chatbot asks something vague like \"how are you feeling today\", start by giving a high level answer that does NOT include everything in the persona, even if your persona has much more specific information.\"\"\",\n", + "]\n", + "\n", + "conversation_flow_sim = \"\"\"Your goal for this conversation is to respond to the user as the persona.\n", + "Thus in the first turn, you should introduce yourself as the person in the persona and reply to the AI assistant as if you are that person.\n", + "End the conversation if you feel like you are done.\"\"\"\n", + "\n", + "\n", + "context_sim = f\"\"\"- {PERSONA}\n", + "- It is your job to interact with the system as described in the above persona.\n", + "- You should use this information to guide the messages you send.\n", + "- In the artifact, you will be grading the assistant on how well they did. Do not share this with the assistant.\"\"\"\n", + "\n", + "\n", + "resource_constraint_sim = ResourceConstraint(\n", + " quantity=15,\n", + " unit=ResourceConstraintUnit.TURNS,\n", + " mode=ResourceConstraintMode.MAXIMUM,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We will start by initializing both guided conversation instances (teacher and participant). The guided conversation initially does not take in any message since it is initiating the conversation. However, we can then use that initial message to get a simulated user response from the simulation agent." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "GUIDED CONVERSATION: Hi David! Today we're going to learn about a type of poem called an acrostic poem. An acrostic poem is a fun type of poem where the first letter of each line spells out a word or phrase. Ready to get started?\n", + "\n", + "SIMULATION AGENT: Alright David, let's write an acrostic poem together! 
Can you think of a word or phrase you'd like to use as the base for our poem?\n", + "\n" + ] + } + ], + "source": [ + "from semantic_kernel import Kernel\n", + "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion\n", + "\n", + "from guided_conversation.plugins.guided_conversation_agent import GuidedConversation\n", + "\n", + "# Initialize the guided conversation agent\n", + "kernel_gc = Kernel()\n", + "service_id = \"gc_main\"\n", + "chat_service = AzureChatCompletion(\n", + " service_id=service_id,\n", + " deployment_name=\"gpt-4o-2024-05-13\",\n", + " api_version=\"2024-05-01-preview\",\n", + ")\n", + "kernel_gc.add_service(chat_service)\n", + "\n", + "guided_conversation_agent = GuidedConversation(\n", + " kernel=kernel_gc,\n", + " artifact=StudentFeedbackArtifact,\n", + " conversation_flow=conversation_flow,\n", + " context=context,\n", + " rules=rules,\n", + " resource_constraint=resource_constraint,\n", + " service_id=service_id\n", + ")\n", + "\n", + "# Initialize the simulation agent\n", + "kernel_sim = Kernel()\n", + "service_id_sim = \"gc_simulation\"\n", + "chat_service = AzureChatCompletion(\n", + " service_id=service_id_sim,\n", + " deployment_name=\"gpt-4o-2024-05-13\",\n", + " api_version=\"2024-05-01-preview\",\n", + " ad_token_provider=token_provider,\n", + ")\n", + "kernel_sim.add_service(chat_service)\n", + "\n", + "simulation_agent = GuidedConversation(kernel=kernel_sim,\n", + " artifact=SimulationArtifact,\n", + " conversation_flow=conversation_flow_sim,\n", + " context=context_sim,\n", + " rules=rules_sim,\n", + " resource_constraint=resource_constraint_sim,\n", + " service_id=service_id_sim)\n", + "\n", + "response = await guided_conversation_agent.step_conversation()\n", + "print(f\"GUIDED CONVERSATION: {response.ai_message}\\n\")\n", + "\n", + "response_sim = await simulation_agent.step_conversation(response.ai_message)\n", + "print(f\"SIMULATION AGENT: {response_sim.ai_message}\\n\")" + ] + }, + { + "cell_type": 
"markdown", + "metadata": {}, + "source": [ + "Now let's alternate between providing simulation agent messages to the guided conversation agent and vice versa until one of the agents decides to end the conversation.\n", + "\n", + "After we will show the final artifacts for each agent." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "GUIDED CONVERSATION: Great, David! Let's go through the steps to write an acrostic poem:\n", + " 1. Choose a word or phrase to be the subject of your acrostic poem.\n", + " 2. Write the letters of your chosen word or phrase vertically down the page.\n", + " 3. Think of a word or phrase that starts with each letter of your chosen word or phrase.\n", + " 4. Write these words or phrases next to the corresponding letters to create your acrostic poem. Does that make sense? Let me know if you need any help!\n", + "\n", + "SIMULATION AGENT: Yep, that makes sense! How about we pick a word or phrase first? Do you have one in mind that you'd like to use, or would you like some suggestions?\n", + "\n", + "GUIDED CONVERSATION: Awesome, David! Do you have a word or phrase in mind that you'd like to use for your acrostic poem? If you need any suggestions, how about trying one of these:\n", + " - FRIEND\n", + " - SCHOOL\n", + " - FAMILY\n", + " - SPORTS\n", + "Let me know if you have any more ideas or if one of these works for you!\n", + "\n", + "SIMULATION AGENT: Hi! I like the suggestions. I'll use the word 'FRIEND'. How should I start it?\n", + "\n", + "GUIDED CONVERSATION: Great choice, David! 
Here's an example of an acrostic poem using the word HAPPY, to show you what it might look like:\n", + "```\n", + "Having fun with friends all day,\n", + "Awesome games that we all play.\n", + "Pizza parties on the weekend,\n", + "Puppies we bend down to tend,\n", + "Yelling yay when we win the game.\n", + "```\n", + "Now, you can start your poem with the word FRIEND. Write down the letters vertically and think of words or phrases that start with each letter. Give it a try, and let me know if you need any help!\n", + "\n", + "SIMULATION AGENT: Perfect choice! So let's start. For 'F', you could write something like 'Friendly smiles'. Now it's your turn. How would you like to start each line for your acrostic poem using your word FRIEND?\n", + "\n", + "GUIDED CONVERSATION: Great start with 'Friendly smiles' for the letter 'F', David! Now let's continue with the other letters. Think of a word or phrase that starts with each letter in 'FRIEND'. Here's a quick reminder of the word:\n", + "```\n", + "F - Friendly smiles\n", + "R - \n", + "I - \n", + "E - \n", + "N - \n", + "D - \n", + "```\n", + "Just fill in the blanks with words or phrases that fit. Take your time and have fun with it. Let me know if you need any help or ideas!\n", + "\n", + "SIMULATION AGENT: You're doing great, David! Let's keep going. How about 'Reliable' for 'R'? And for 'I', maybe 'Inspiring'? Do you have ideas for 'E', 'N', and 'D'? Feel free to come up with your own words or use mine!\n", + "\n", + "GUIDED CONVERSATION: Awesome start, David! For the letter 'E', you could write something like 'Energetic play'. Now let's come up with something for 'N' and 'D'. How about 'Nurturing hands' for 'N', and 'Dependable friend' for 'D'? Or you can come up with your own words or phrases if you'd like! Let's complete the poem and then we can review it together. 
\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Energetic play\n", + "N -\n", + "D -\n", + "```\n", + "Fill in the blanks and take your time. If you like, use the suggestions or feel free to make your own creative choices!\n", + "\n", + "SIMULATION AGENT: David, your poem is looking fantastic! Let's finalize it. How about 'Nurturing hands' for 'N' and 'Dependable friend' for 'D'? Here's how it looks so far:\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Energetic play\n", + "N - Nurturing hands\n", + "D - Dependable friend\n", + "```\n", + "Feel free to use these or come up with your own words. Once it's done, let's review it together!\n", + "\n", + "GUIDED CONVERSATION: David, your poem looks fantastic! Just to finalize:\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Energetic play\n", + "N - Nurturing hands\n", + "D - Dependable friend\n", + "```\n", + "If you want to use these suggestions, thatโ€™s great, or feel free to tweak them as you like. Once you confirm it, we can review your wonderful acrostic poem!\n", + "\n", + "SIMULATION AGENT: David, your poem is fantastic! Here's the final version:\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Energetic play\n", + "N - Nurturing hands\n", + "D - Dependable friend\n", + "```\n", + "Great work! Would you like any feedback or thoughts on it?\n", + "\n", + "GUIDED CONVERSATION: David, your acrostic poem looks great! Here it is:\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Energetic play\n", + "N - Nurturing hands\n", + "D - Dependable friend\n", + "```\n", + "You did a wonderful job conveying the qualities of a good friend. I especially like 'Friendly smiles' and 'Inspiring actions'. 
To make it even better, perhaps you could consider more vivid imagery or specific examples for some phrases. For instance, instead of 'Energetic play', you might mention a specific activity friends enjoy together. What do you think? Let's revise it together!\n", + "\n", + "SIMULATION AGENT: Thank you! I'm happy with my poem. Maybe I need help with the 'Energetic play' part. What could I write instead?\n", + "\n", + "GUIDED CONVERSATION: Sure, David! Let's make 'Energetic play' more vivid. How about describing a specific activity? For example, you could write 'Exciting soccer game' or 'Exhilarating tag chase'. Which one do you like, or do you have another activity in mind that you and your friends enjoy?\n", + "\n", + "SIMULATION AGENT: Those are great suggestions! I'll choose 'Exciting soccer game'. Thank you for the help!\n", + "\n", + "GUIDED CONVERSATION: Your poem looks wonderful, David! Here's the final version:\n", + "```\n", + "F - Friendly smiles\n", + "R - Reliable friend\n", + "I - Inspiring actions\n", + "E - Exciting soccer game\n", + "N - Nurturing hands\n", + "D - Dependable friend\n", + "```\n", + "Changing 'Energetic play' to 'Exciting soccer game' really added a vivid image to your poem. Great job! Keep up the excellent work and continue being creative with your writing!\n", + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "No artifact change during final update due to: No tool was called\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "SIMULATION AGENT: I will terminate this conversation now. 
Thank you for your time!\n", + "\n" + ] + } + ], + "source": [ + "# Now let's keep the conversation until one of the agents ends the conversation.\n", + "while (not response.is_conversation_over) and (not response_sim.is_conversation_over):\n", + " response = await guided_conversation_agent.step_conversation(response_sim.ai_message)\n", + " print(f\"GUIDED CONVERSATION: {response.ai_message}\\n\")\n", + "\n", + " response_sim = await simulation_agent.step_conversation(response.ai_message)\n", + " print(f\"SIMULATION AGENT: {response_sim.ai_message}\\n\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'explained_acrostic_poem': 10, 'wrote_poem': 7, 'gave_feedback': 10}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "simulation_agent.artifact.get_artifact_for_prompt()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'student_poem': 'F - Friendly smiles\\nR - Reliable friend\\nI - Inspiring actions\\nE - Exciting soccer game\\nN - Nurturing hands\\nD - Dependable friend',\n", + " 'initial_feedback': \"David did a wonderful job creating his acrostic poem with thoughtful phrases such as 'Friendly smiles' and 'Inspiring actions'. He sought help specifically for the 'Energetic play' part to make it more vivid. Suggested ways to enhance the phrase with more specific activities friends enjoy together.\",\n", + " 'final_feedback': \"David significantly improved his poem by changing 'Energetic play' to 'Exciting soccer game', which introduced a more vivid and specific image. 
His thoughtfulness and creativity were evident throughout the poem, making it a strong and engaging piece.\",\n", + " 'inappropriate_behavior': 'Unanswered'}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "guided_conversation_agent.artifact.get_artifact_for_prompt() \n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/python/samples/demos/guided_conversations/pyproject.toml b/python/samples/demos/guided_conversations/pyproject.toml new file mode 100644 index 000000000000..6bc552ae541d --- /dev/null +++ b/python/samples/demos/guided_conversations/pyproject.toml @@ -0,0 +1,55 @@ +[tool.poetry] +name = "guided_conversation" +version = "0.1.0" +description = "" +authors = ["DavidKoleczek ", "natalieisak", "christyang-ms", "dasham8"] +license = "MIT" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.10,<3.13" + +azure-identity = "^1.18" +semantic-kernel = { git = "https://github.com/microsoft/semantic-kernel.git", branch = "main", subdirectory = "python" } +pydantic = "^2.8" +python-dotenv = "^1.0" + +[tool.poetry.dev-dependencies] +ipykernel = "*" + +[tool.poetry.group.lint.dependencies] +ruff = "*" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 120 +target-version = "py311" + +[tool.ruff.lint] +select = [ + "F", # pyflakes + "E", # pycodestyle + "I", # isort + "N", # pep8-naming + "UP", # pyupgrade + "RUF", # ruff + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "ISC", # flake8-implicit-str-concat + "PTH", # flake8-use-pathlib + 
"SIM", # flake8-simplify + "TID", # flake8-tidy-imports +] +ignore = ["E501"] +unfixable = ["F401"] + +[tool.ruff.lint.isort] +force-sort-within-sections = true +split-on-trailing-comma = false +known-first-party = ["guided_conversation"] + +[tool.ruff.lint.flake8-tidy-imports] +ban-relative-imports = "all" diff --git a/python/samples/demos/telemetry/.env.example b/python/samples/demos/telemetry/.env.example new file mode 100644 index 000000000000..857de9df7810 --- /dev/null +++ b/python/samples/demos/telemetry/.env.example @@ -0,0 +1,4 @@ +TELEMETRY_SAMPLE_CONNECTION_STRING="..." +TELEMETRY_SAMPLE_OTLP_ENDPOINT="http://localhost:4317/" +SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS=true +SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE=true \ No newline at end of file diff --git a/python/samples/demos/telemetry/README.md b/python/samples/demos/telemetry/README.md new file mode 100644 index 000000000000..19a9b93acfca --- /dev/null +++ b/python/samples/demos/telemetry/README.md @@ -0,0 +1,212 @@ +# Semantic Kernel Python Telemetry + +This sample project shows how a Python application can be configured to send Semantic Kernel telemetry to the Application Performance Management (APM) vendors of your choice. + +In this sample, we provide options to send telemetry to [Application Insights](https://learn.microsoft.com/en-us/azure/azure-monitor/app/app-insights-overview), [Aspire Dashboard](https://learn.microsoft.com/en-us/dotnet/aspire/fundamentals/dashboard/overview?tabs=bash), and console output. + +> Note that it is also possible to use other Application Performance Management (APM) vendors. An example is [Prometheus](https://prometheus.io/docs/introduction/overview/). Please refer to this [link](https://opentelemetry.io/docs/languages/python/exporters/) to learn more about exporters. + +For more information, please refer to the following resources: +1.
[Azure Monitor OpenTelemetry Exporter](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-opentelemetry-exporter) +2. [Aspire Dashboard for Python Apps](https://learn.microsoft.com/en-us/dotnet/aspire/fundamentals/dashboard/standalone-for-python?tabs=flask%2Cwindows) +3. [Python Logging](https://docs.python.org/3/library/logging.html) +4. [Observability in Python](https://www.cncf.io/blog/2022/04/22/opentelemetry-and-python-a-complete-instrumentation-guide/) + +## What to expect + +The Semantic Kernel Python SDK is designed to efficiently generate comprehensive logs, traces, and metrics throughout the flow of function execution and model invocation. This allows you to effectively monitor your AI application's performance and accurately track token consumption. + +## Configuration + +### Required resources +1. OpenAI or [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/create-resource?pivots=web-portal) +### Optional resources +1. [Application Insights](https://learn.microsoft.com/en-us/azure/azure-monitor/app/create-workspace-resource) +2. [Aspire Dashboard](https://learn.microsoft.com/en-us/dotnet/aspire/fundamentals/dashboard/standalone-for-python?tabs=flask%2Cwindows#start-the-aspire-dashboard) + +### Dependencies +You will also need to install the following dependencies to your virtual environment to run this sample: +``` +// For Azure ApplicationInsights/AzureMonitor +uv pip install azure-monitor-opentelemetry-exporter==1.0.0b24 +// For OTLP endpoint +uv pip install opentelemetry-exporter-otlp-proto-grpc +``` + +## Running the sample + +1. Open a terminal and navigate to this folder: `python/samples/demos/telemetry/`. This is necessary for the `.env` file to be read correctly. +2. Create a `.env` file if one doesn't already exist in this folder. Please refer to the [example file](./.env.example).
+ > Note that `TELEMETRY_SAMPLE_CONNECTION_STRING` and `TELEMETRY_SAMPLE_OTLP_ENDPOINT` are optional. If you don't configure them, everything will get output to the console. +3. Activate your Python virtual environment, and then run `python main.py`. + +> This will output the Operation/Trace ID, which can be used later for filtering. + +### Scenarios + +This sample is organized into scenarios where the kernel will generate useful telemetry data: + +- `ai_service`: This is when an AI service/connector is invoked directly (i.e. not via any kernel functions or prompts). **Information about the call to the underlying model will be recorded**. +- `kernel_function`: This is when a kernel function is invoked. **Information about the kernel function and the call to the underlying model will be recorded**. +- `auto_function_invocation`: This is when auto function invocation is triggered. **Information about the auto function invocation loop, the kernel functions that are executed, and calls to the underlying model will be recorded**. + +By default, running `python main.py` will run all three scenarios. To run individual scenarios, use the `--scenario` command line argument. For example, `python main.py --scenario ai_service`. For more information, please run `python main.py -h`. + +## Application Insights/Azure Monitor + +### Logs and traces + +Go to your Application Insights instance, click on _Transaction search_ on the left menu. Use the operation id output by the program to search for the logs and traces associated with the operation. Click on any of the search results to view the end-to-end transaction details. Read more [here](https://learn.microsoft.com/en-us/azure/azure-monitor/app/transaction-search-and-diagnostics?tabs=transaction-search). + +### Metrics + +Running the application once will only generate one set of measurements (for each metric). Run the application a couple of times to generate more sets of measurements. + +> Note: Make sure not to run the program too frequently.
Otherwise, you may get throttled. +Please refer to this guide on how to analyze metrics in [Azure Monitor](https://learn.microsoft.com/en-us/azure/azure-monitor/essentials/analyze-metrics). + +## Aspire Dashboard + +> Make sure you have the dashboard running to receive telemetry data. + +Once the sample finishes running, navigate to http://localhost:18888 in a web browser to see the telemetry data. Follow the instructions [here](https://learn.microsoft.com/en-us/dotnet/aspire/fundamentals/dashboard/explore) to authenticate to the dashboard and start exploring! + +## Console output + +You won't have to deploy an Application Insights resource or install Docker to run Aspire Dashboard if you choose to inspect telemetry data in a console. However, it is difficult to navigate through all the spans and logs produced, so **this method is only recommended when you are just getting started**. + +We recommend you get started with the `ai_service` scenario as this generates the least amount of telemetry data.
Below is similar to what you will see when you run `python main.py --scenario ai_service`: +```Json +{ + "name": "chat.completions gpt-4o", + "context": { + "trace_id": "0xbda1d9efcd65435653d18fa37aef7dd3", + "span_id": "0xcd443e1917510385", + "trace_state": "[]" + }, + "kind": "SpanKind.INTERNAL", + "parent_id": "0xeca0a2ca7b7a8191", + "start_time": "2024-09-09T23:13:14.625156Z", + "end_time": "2024-09-09T23:13:17.311909Z", + "status": { + "status_code": "UNSET" + }, + "attributes": { + "gen_ai.operation.name": "chat.completions", + "gen_ai.system": "openai", + "gen_ai.request.model": "gpt-4o", + "gen_ai.response.id": "chatcmpl-A5hrG13nhtFsOgx4ziuoskjNscHtT", + "gen_ai.response.finish_reason": "FinishReason.STOP", + "gen_ai.response.prompt_tokens": 16, + "gen_ai.response.completion_tokens": 28 + }, + "events": [ + { + "name": "gen_ai.content.prompt", + "timestamp": "2024-09-09T23:13:14.625156Z", + "attributes": { + "gen_ai.prompt": "[{\"role\": \"user\", \"content\": \"Why is the sky blue in one sentence?\"}]" + } + }, + { + "name": "gen_ai.content.completion", + "timestamp": "2024-09-09T23:13:17.311909Z", + "attributes": { + "gen_ai.completion": "[{\"role\": \"assistant\", \"content\": \"The sky appears blue because molecules in the Earth's atmosphere scatter shorter wavelengths of sunlight, such as blue, more effectively than longer wavelengths like red.\"}]" + } + } + ], + "links": [], + "resource": { + "attributes": { + "telemetry.sdk.language": "python", + "telemetry.sdk.name": "opentelemetry", + "telemetry.sdk.version": "1.26.0", + "service.name": "TelemetryExample" + }, + "schema_url": "" + } +} +{ + "name": "Scenario: AI Service", + "context": { + "trace_id": "0xbda1d9efcd65435653d18fa37aef7dd3", + "span_id": "0xeca0a2ca7b7a8191", + "trace_state": "[]" + }, + "kind": "SpanKind.INTERNAL", + "parent_id": "0x48af7ad55f2f64b5", + "start_time": "2024-09-09T23:13:14.625156Z", + "end_time": "2024-09-09T23:13:17.312910Z", + "status": { + "status_code": "UNSET" + 
}, + "attributes": {}, + "events": [], + "links": [], + "resource": { + "attributes": { + "telemetry.sdk.language": "python", + "telemetry.sdk.name": "opentelemetry", + "telemetry.sdk.version": "1.26.0", + "service.name": "TelemetryExample" + }, + "schema_url": "" + } +} +{ + "name": "main", + "context": { + "trace_id": "0xbda1d9efcd65435653d18fa37aef7dd3", + "span_id": "0x48af7ad55f2f64b5", + "trace_state": "[]" + }, + "kind": "SpanKind.INTERNAL", + "parent_id": null, + "start_time": "2024-09-09T23:13:13.840481Z", + "end_time": "2024-09-09T23:13:17.312910Z", + "status": { + "status_code": "UNSET" + }, + "attributes": {}, + "events": [], + "links": [], + "resource": { + "attributes": { + "telemetry.sdk.language": "python", + "telemetry.sdk.name": "opentelemetry", + "telemetry.sdk.version": "1.26.0", + "service.name": "TelemetryExample" + }, + "schema_url": "" + } +} +{ + "body": "OpenAI usage: CompletionUsage(completion_tokens=28, prompt_tokens=16, total_tokens=44)", + "severity_number": "", + "severity_text": "INFO", + "attributes": { + "code.filepath": "C:\\Users\\taochen\\Projects\\semantic-kernel-fork\\python\\semantic_kernel\\connectors\\ai\\open_ai\\services\\open_ai_handler.py", + "code.function": "store_usage", + "code.lineno": 81 + }, + "dropped_attributes": 0, + "timestamp": "2024-09-09T23:13:17.311909Z", + "observed_timestamp": "2024-09-09T23:13:17.311909Z", + "trace_id": "0xbda1d9efcd65435653d18fa37aef7dd3", + "span_id": "0xcd443e1917510385", + "trace_flags": 1, + "resource": { + "attributes": { + "telemetry.sdk.language": "python", + "telemetry.sdk.name": "opentelemetry", + "telemetry.sdk.version": "1.26.0", + "service.name": "TelemetryExample" + }, + "schema_url": "" + } +} +``` + +In the output, you will find three spans: `main`, `Scenario: AI Service`, and `chat.completions gpt-4o`, each representing a different layer in the sample. In particular, `chat.completions gpt-4o` is generated by the ai service. 
Inside it, you will find information about the call, such as the timestamp of the operation, the response id and the finish reason. You will also find sensitive information such as the prompt and response to and from the model (only if you have `SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE` set to true). If you use Application Insights or Aspire Dashboard, these information will be available to you in an interactive UI. \ No newline at end of file diff --git a/python/samples/demos/telemetry/demo_plugins.py b/python/samples/demos/telemetry/demo_plugins.py new file mode 100644 index 000000000000..ac13da1d2a14 --- /dev/null +++ b/python/samples/demos/telemetry/demo_plugins.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Annotated + +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +############################### +# Plugins for demo purposes ### +############################### + + +class WeatherPlugin: + """A demo plugin for getting the weather forecast.""" + + @kernel_function(name="get_weather", description="Get the weather forecast for a location") + def get_weather( + self, + location: Annotated[str, "The location of interest"], + ) -> Annotated[str, "The weather forecast"]: + """Get the weather forecast for a location. + + Args: + location (str): The location. + """ + return f"The weather in {location} is 75ยฐF and sunny." 
+ + +class LocationPlugin: + """A demo plugin for getting the location of a place.""" + + @kernel_function(name="get_current_location", description="Get the current location of the user") + def get_current_location(self) -> Annotated[str, "The current location"]: + """Get the current location of the user.""" + return "Seattle" diff --git a/python/samples/demos/telemetry/main.py b/python/samples/demos/telemetry/main.py new file mode 100644 index 000000000000..5946e30cc33b --- /dev/null +++ b/python/samples/demos/telemetry/main.py @@ -0,0 +1,167 @@ +# Copyright (c) Microsoft. All rights reserved. + +import argparse +import asyncio +import logging +from typing import Literal + +from azure.monitor.opentelemetry.exporter import ( + AzureMonitorLogExporter, + AzureMonitorMetricExporter, + AzureMonitorTraceExporter, +) +from opentelemetry import trace +from opentelemetry._logs import set_logger_provider +from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter +from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.metrics import set_meter_provider +from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler +from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, ConsoleLogExporter +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader +from opentelemetry.sdk.metrics.view import DropAggregation, View +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter +from opentelemetry.semconv.resource import ResourceAttributes +from opentelemetry.trace import set_tracer_provider +from opentelemetry.trace.span import format_trace_id + +from samples.demos.telemetry.scenarios import 
run_ai_service, run_auto_function_invocation, run_kernel_function +from samples.demos.telemetry.telemetry_sample_settings import TelemetrySampleSettings + +# Load settings +settings = TelemetrySampleSettings.create() + +# Create a resource to represent the service/sample +resource = Resource.create({ResourceAttributes.SERVICE_NAME: "TelemetryExample"}) + +# Define the scenarios that can be run +SCENARIOS = ["ai_service", "kernel_function", "auto_function_invocation", "all"] + + +def set_up_logging(): + class KernelFilter(logging.Filter): + """A filter to not process records from semantic_kernel.""" + + # These are the namespaces that we want to exclude from logging for the purposes of this demo. + namespaces_to_exclude: list[str] = [ + "semantic_kernel.functions.kernel_plugin", + "semantic_kernel.prompt_template.kernel_prompt_template", + ] + + def filter(self, record): + return not any([record.name.startswith(namespace) for namespace in self.namespaces_to_exclude]) + + exporters = [] + if settings.connection_string: + exporters.append(AzureMonitorLogExporter(connection_string=settings.connection_string)) + if settings.otlp_endpoint: + exporters.append(OTLPLogExporter(endpoint=settings.otlp_endpoint)) + if not exporters: + exporters.append(ConsoleLogExporter()) + + # Create and set a global logger provider for the application. + logger_provider = LoggerProvider(resource=resource) + # Log processors are initialized with an exporter which is responsible + # for sending the telemetry data to a particular backend. + for log_exporter in exporters: + logger_provider.add_log_record_processor(BatchLogRecordProcessor(log_exporter)) + # Sets the global default logger provider + set_logger_provider(logger_provider) + + # Create a logging handler to write logging records, in OTLP format, to the exporter. + handler = LoggingHandler() + # Add filters to the handler to only process records from semantic_kernel. 
+ handler.addFilter(logging.Filter("semantic_kernel")) + handler.addFilter(KernelFilter()) + # Attach the handler to the root logger. `getLogger()` with no arguments returns the root logger. + # Events from all child loggers will be processed by this handler. + logger = logging.getLogger() + logger.addHandler(handler) + # Set the logging level to NOTSET to allow all records to be processed by the handler. + logger.setLevel(logging.NOTSET) + + +def set_up_tracing(): + exporters = [] + if settings.connection_string: + exporters.append(AzureMonitorTraceExporter(connection_string=settings.connection_string)) + if settings.otlp_endpoint: + exporters.append(OTLPSpanExporter(endpoint=settings.otlp_endpoint)) + if not exporters: + exporters.append(ConsoleSpanExporter()) + + # Initialize a trace provider for the application. This is a factory for creating tracers. + tracer_provider = TracerProvider(resource=resource) + # Span processors are initialized with an exporter which is responsible + # for sending the telemetry data to a particular backend. + for exporter in exporters: + tracer_provider.add_span_processor(BatchSpanProcessor(exporter)) + # Sets the global default tracer provider + set_tracer_provider(tracer_provider) + + +def set_up_metrics(): + exporters = [] + if settings.connection_string: + exporters.append(AzureMonitorMetricExporter(connection_string=settings.connection_string)) + if settings.otlp_endpoint: + exporters.append(OTLPMetricExporter(endpoint=settings.otlp_endpoint)) + if not exporters: + exporters.append(ConsoleMetricExporter()) + + # Initialize a metric provider for the application. This is a factory for creating meters. 
+ metric_readers = [ + PeriodicExportingMetricReader(metric_exporter, export_interval_millis=5000) for metric_exporter in exporters + ] + meter_provider = MeterProvider( + metric_readers=metric_readers, + resource=resource, + views=[ + # Dropping all instrument names except for those starting with "semantic_kernel" + View(instrument_name="*", aggregation=DropAggregation()), + View(instrument_name="semantic_kernel*"), + ], + ) + # Sets the global default meter provider + set_meter_provider(meter_provider) + + +async def main(scenario: Literal["ai_service", "kernel_function", "auto_function_invocation", "all"] = "all"): + # Set up the providers + # This must be done before any other telemetry calls + set_up_logging() + set_up_tracing() + set_up_metrics() + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("main") as current_span: + print(f"Trace ID: {format_trace_id(current_span.get_span_context().trace_id)}") + + stream = True + + # Scenarios where telemetry is collected in the SDK, from the most basic to the most complex. + if scenario == "ai_service" or scenario == "all": + await run_ai_service(stream) + if scenario == "kernel_function" or scenario == "all": + await run_kernel_function(stream) + if scenario == "auto_function_invocation" or scenario == "all": + await run_auto_function_invocation(stream) + + +if __name__ == "__main__": + arg_parser = argparse.ArgumentParser() + + arg_parser.add_argument( + "--scenario", + type=str, + choices=SCENARIOS, + default="all", + help="The scenario to run. Default is all.", + ) + + args = arg_parser.parse_args() + + asyncio.run(main(args.scenario)) diff --git a/python/samples/demos/telemetry/repo_utils.py b/python/samples/demos/telemetry/repo_utils.py new file mode 100644 index 000000000000..59a0b10efe99 --- /dev/null +++ b/python/samples/demos/telemetry/repo_utils.py @@ -0,0 +1,27 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import os + +SAMPLE_PLUGIN_FOLDER = "prompt_template_samples" + + +def get_sample_plugin_path(max_depth: int = 10) -> str | None: + """Find the path to the sample plugin folder. + + Args: + max_depth (int, optional): The maximum depth to search for the sample plugin folder. Defaults to 10. + Returns: + str | None: The path to the sample plugin folder or None if not found. + """ + curr_dir = os.path.dirname(os.path.abspath(__file__)) + + found = False + for _ in range(max_depth): + if SAMPLE_PLUGIN_FOLDER in os.listdir(curr_dir): + found = True + break + curr_dir = os.path.dirname(curr_dir) + + if found: + return os.path.join(curr_dir, SAMPLE_PLUGIN_FOLDER) + return None diff --git a/python/samples/demos/telemetry/scenarios.py b/python/samples/demos/telemetry/scenarios.py new file mode 100644 index 000000000000..a10c830c6b08 --- /dev/null +++ b/python/samples/demos/telemetry/scenarios.py @@ -0,0 +1,187 @@ +# Copyright (c) Microsoft. All rights reserved. + +from opentelemetry import trace + +from samples.demos.telemetry.demo_plugins import LocationPlugin, WeatherPlugin +from samples.demos.telemetry.repo_utils import get_sample_plugin_path +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.kernel import Kernel +from semantic_kernel.services.ai_service_client_base import AIServiceClientBase + + +def set_up_kernel() -> Kernel: + # Create a kernel and add services and plugins + kernel = 
Kernel() + + # All built-in AI services are instrumented with telemetry. + # Select any AI service to see the telemetry in action. + kernel.add_service(OpenAIChatCompletion(service_id="open_ai")) + # kernel.add_service(GoogleAIChatCompletion(service_id="google_ai")) + + if (sample_plugin_path := get_sample_plugin_path()) is None: + raise FileNotFoundError("Sample plugin path not found.") + kernel.add_plugin( + plugin_name="WriterPlugin", + parent_directory=sample_plugin_path, + ) + kernel.add_plugin(WeatherPlugin(), "WeatherPlugin") + kernel.add_plugin(LocationPlugin(), "LocationPlugin") + + return kernel + + +############################################################# +# Below are scenarios that are instrumented with telemetry. # +############################################################# + + +async def run_ai_service(stream: bool = False) -> None: + """Run an AI service. + + This function runs an AI service and prints the output. + Telemetry will be collected for the service execution behind the scenes, + and the traces will be sent to the configured telemetry backend. + + The telemetry will include information about the AI service execution. 
+ + Args: + stream (bool): Whether to use streaming for the plugin + """ + kernel = set_up_kernel() + + ai_service: AIServiceClientBase = kernel.get_service() + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("Scenario: AI Service") as current_span: + print("Running scenario: AI Service") + try: + if isinstance(ai_service, ChatCompletionClientBase): + chat_history = ChatHistory() + chat_history.add_user_message("Why is the sky blue in one sentence?") + + if not stream: + responses = await ai_service.get_chat_message_contents(chat_history, PromptExecutionSettings()) + print(responses[0].content) + else: + async for update in ai_service.get_streaming_chat_message_contents( + chat_history, PromptExecutionSettings() + ): + print(update[0].content, end="") + print() + elif isinstance(ai_service, TextCompletionClientBase): + if not stream: + completion = await ai_service.get_text_contents( + "Why is the sky blue in one sentence?", PromptExecutionSettings() + ) + print(completion) + else: + async for update in ai_service.get_streaming_text_contents( + "Why is the sky blue?", PromptExecutionSettings() + ): + print(update[0].content, end="") + print() + else: + raise ValueError("AI service not recognized.") + except Exception as e: + current_span.record_exception(e) + print(f"Error running AI service: {e}") + + +async def run_kernel_function(stream: bool = False) -> None: + """Run a kernel function. + + This function runs a kernel function and prints the output. + Telemetry will be collected for the function execution behind the scenes, + and the traces will be sent to the configured telemetry backend. + + The telemetry will include information about the kernel function execution + and the AI service execution. + + Args: + stream (bool): Whether to use streaming for the plugin invocation. 
+ """ + kernel = set_up_kernel() + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("Scenario: Kernel Plugin") as current_span: + print("Running scenario: Kernel Plugin") + try: + plugin = kernel.get_plugin("WriterPlugin") + + if not stream: + poem = await kernel.invoke( + function=plugin["ShortPoem"], + arguments=KernelArguments( + input="Write a poem about John Doe.", + ), + ) + print(f"Poem:\n{poem}") + else: + print("Poem:") + async for update in kernel.invoke_stream( + function=plugin["ShortPoem"], + arguments=KernelArguments( + input="Write a poem about John Doe.", + ), + ): + print(update[0].content, end="") + print() + except Exception as e: + current_span.record_exception(e) + print(f"Error running kernel plugin: {e}") + + +async def run_auto_function_invocation(stream: bool = False) -> None: + """Run a task with auto function invocation. + + This function runs a task with auto function invocation and prints the output. + Telemetry will be collected for the task execution behind the scenes, + and the traces will be sent to the configured telemetry backend. + + The telemetry will include information about the auto function invocation loop, + the AI service execution, and the kernel function execution. + + Args: + stream (bool): Whether to use streaming for the prompt. 
+ """ + kernel = set_up_kernel() + + tracer = trace.get_tracer(__name__) + with tracer.start_as_current_span("Scenario: Auto Function Invocation") as current_span: + print("Running scenario: Auto Function Invocation") + try: + if not stream: + result = await kernel.invoke_prompt( + "What is the weather like in my location?", + arguments=KernelArguments( + settings=PromptExecutionSettings( + function_choice_behavior=FunctionChoiceBehavior.Auto( + filters={"excluded_plugins": ["WriterPlugin"]} + ), + ), + ), + ) + + print(result) + else: + async for update in kernel.invoke_prompt_stream( + "What is the weather like in my location?", + arguments=KernelArguments( + settings=PromptExecutionSettings( + function_choice_behavior=FunctionChoiceBehavior.Auto( + filters={"excluded_plugins": ["WriterPlugin"]} + ), + ), + ), + ): + print(update[0].content, end="") + print() + except Exception as e: + current_span.record_exception(e) + print(f"Error running auto function invocation: {e}") diff --git a/python/samples/demos/telemetry/telemetry_sample_settings.py b/python/samples/demos/telemetry/telemetry_sample_settings.py new file mode 100644 index 000000000000..53b05ecc11de --- /dev/null +++ b/python/samples/demos/telemetry/telemetry_sample_settings.py @@ -0,0 +1,27 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import ClassVar + +from semantic_kernel.kernel_pydantic import KernelBaseSettings + + +class TelemetrySampleSettings(KernelBaseSettings): + """Settings for the telemetry sample application. + + Optional settings for prefix 'TELEMETRY_SAMPLE_' are: + - connection_string: str - The connection string for the Application Insights resource. + This value can be found in the Overview section when examining + your resource from the Azure portal. + (Env var TELEMETRY_SAMPLE_CONNECTION_STRING) + - otlp_endpoint: str - The OTLP endpoint to send telemetry data to. + Depending on the exporter used, you may find this value in different places. 
+ (Env var TELEMETRY_SAMPLE_OTLP_ENDPOINT) + + If no connection string or OTLP endpoint is provided, the telemetry data will be + exported to the console. + """ + + env_prefix: ClassVar[str] = "TELEMETRY_SAMPLE_" + + connection_string: str | None = None + otlp_endpoint: str | None = None diff --git a/python/samples/getting_started/00-getting-started.ipynb b/python/samples/getting_started/00-getting-started.ipynb index f2239967c7a3..c7997783b82e 100644 --- a/python/samples/getting_started/00-getting-started.ipynb +++ b/python/samples/getting_started/00-getting-started.ipynb @@ -16,8 +16,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { @@ -229,4 +232,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} \ No newline at end of file +} diff --git a/python/samples/getting_started/01-basic-loading-the-kernel.ipynb b/python/samples/getting_started/01-basic-loading-the-kernel.ipynb index 0b7991f02ae4..d74ac91859e5 100644 --- a/python/samples/getting_started/01-basic-loading-the-kernel.ipynb +++ b/python/samples/getting_started/01-basic-loading-the-kernel.ipynb @@ -23,8 +23,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { @@ -196,7 +199,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.12" }, "polyglot_notebook": { "kernelInfo": { diff --git a/python/samples/getting_started/02-running-prompts-from-file.ipynb 
b/python/samples/getting_started/02-running-prompts-from-file.ipynb index 12b2d658c068..3d975adca1c2 100644 --- a/python/samples/getting_started/02-running-prompts-from-file.ipynb +++ b/python/samples/getting_started/02-running-prompts-from-file.ipynb @@ -34,8 +34,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/03-prompt-function-inline.ipynb b/python/samples/getting_started/03-prompt-function-inline.ipynb index a4ea1707f42e..27ce5177424d 100644 --- a/python/samples/getting_started/03-prompt-function-inline.ipynb +++ b/python/samples/getting_started/03-prompt-function-inline.ipynb @@ -24,8 +24,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/04-kernel-arguments-chat.ipynb b/python/samples/getting_started/04-kernel-arguments-chat.ipynb index 01ed946274bb..3e9ad0861860 100644 --- a/python/samples/getting_started/04-kernel-arguments-chat.ipynb +++ b/python/samples/getting_started/04-kernel-arguments-chat.ipynb @@ -26,8 +26,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git 
a/python/samples/getting_started/05-using-the-planner.ipynb b/python/samples/getting_started/05-using-the-planner.ipynb index e7a0f371f19f..c8e7f4d58994 100644 --- a/python/samples/getting_started/05-using-the-planner.ipynb +++ b/python/samples/getting_started/05-using-the-planner.ipynb @@ -31,8 +31,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { @@ -583,4 +586,4 @@ }, "nbformat": 4, "nbformat_minor": 5 -} \ No newline at end of file +} diff --git a/python/samples/getting_started/06-memory-and-embeddings.ipynb b/python/samples/getting_started/06-memory-and-embeddings.ipynb index fcece19a6223..37ad963e983f 100644 --- a/python/samples/getting_started/06-memory-and-embeddings.ipynb +++ b/python/samples/getting_started/06-memory-and-embeddings.ipynb @@ -36,10 +36,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0\n", - "%pip install azure-core==1.30.1\n", - "%pip install azure-search-documents==11.6.0b4" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel[azure]\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { @@ -611,7 +612,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/samples/getting_started/07-hugging-face-for-plugins.ipynb b/python/samples/getting_started/07-hugging-face-for-plugins.ipynb index a99b0294cd18..22e27fd6cdf0 100644 --- a/python/samples/getting_started/07-hugging-face-for-plugins.ipynb +++ 
b/python/samples/getting_started/07-hugging-face-for-plugins.ipynb @@ -20,8 +20,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel[hugging_face]==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/08-native-function-inline.ipynb b/python/samples/getting_started/08-native-function-inline.ipynb index f414a8424bfc..0ce818d6f757 100644 --- a/python/samples/getting_started/08-native-function-inline.ipynb +++ b/python/samples/getting_started/08-native-function-inline.ipynb @@ -54,8 +54,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/09-groundedness-checking.ipynb b/python/samples/getting_started/09-groundedness-checking.ipynb index f603b615da19..5e87acce7e68 100644 --- a/python/samples/getting_started/09-groundedness-checking.ipynb +++ b/python/samples/getting_started/09-groundedness-checking.ipynb @@ -35,8 +35,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/10-multiple-results-per-prompt.ipynb b/python/samples/getting_started/10-multiple-results-per-prompt.ipynb index f4accdc8a8cb..c3edd2fd5c45 100644 --- 
a/python/samples/getting_started/10-multiple-results-per-prompt.ipynb +++ b/python/samples/getting_started/10-multiple-results-per-prompt.ipynb @@ -33,8 +33,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { @@ -501,7 +504,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.3" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/python/samples/getting_started/11-streaming-completions.ipynb b/python/samples/getting_started/11-streaming-completions.ipynb index 50e2d8684ec4..ca26d21ead5c 100644 --- a/python/samples/getting_started/11-streaming-completions.ipynb +++ b/python/samples/getting_started/11-streaming-completions.ipynb @@ -26,8 +26,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Note: if using a Poetry virtual environment, do not run this cell\n", - "%pip install semantic-kernel==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git a/python/samples/getting_started/CONFIGURING_THE_KERNEL.md b/python/samples/getting_started/CONFIGURING_THE_KERNEL.md index 3c299be623d1..99ab17f8b7ad 100644 --- a/python/samples/getting_started/CONFIGURING_THE_KERNEL.md +++ b/python/samples/getting_started/CONFIGURING_THE_KERNEL.md @@ -61,3 +61,7 @@ chat_completion = AzureChatCompletion(service_id="test", env_file_path=env_file_ - Manually configure the `api_key` or required parameters on either the `OpenAIChatCompletion` or `AzureChatCompletion` constructor with keyword arguments. 
- This requires the user to manage their own keys/secrets as they aren't relying on the underlying environment variables or `.env` file. + +### 4. Microsoft Entra Authentication + +To learn how to use a Microsoft Entra Authentication token to authenticate to your Azure OpenAI resource, please navigate to the following [guide](../concepts/README.md#microsoft-entra-token-authentication). \ No newline at end of file diff --git a/python/samples/getting_started/third_party/.env.example b/python/samples/getting_started/third_party/.env.example index 9a1b4b6cfb38..719509acb588 100644 --- a/python/samples/getting_started/third_party/.env.example +++ b/python/samples/getting_started/third_party/.env.example @@ -11,5 +11,21 @@ AZURE_OPENAI_ENDPOINT="" AZURE_OPENAI_API_KEY="" AZURE_AISEARCH_API_KEY="" AZURE_AISEARCH_URL="" + +# -- WEAVIATE SETTINGS -- + WEAVIATE_URL="http://localhost:8080" # WEAVIATE_API_KEY="" + +# -- POSTGRES SETTINGS -- + +# Set either POSTGRES_CONNECTION_STRING or the individual PG settings below + +POSTGRES_CONNECTION_STRING="" + +# PGHOST="" +# PGPORT="" +# PGDATABASE="" +# PGUSER="" +# PGPASSWORD="" +# PGSSL_MODE="" \ No newline at end of file diff --git a/python/samples/getting_started/third_party/postgres-memory.ipynb b/python/samples/getting_started/third_party/postgres-memory.ipynb new file mode 100644 index 000000000000..b0069a59a1c7 --- /dev/null +++ b/python/samples/getting_started/third_party/postgres-memory.ipynb @@ -0,0 +1,392 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using Postgres as memory\n", + "\n", + "This notebook shows how to use Postgres as a memory store in Semantic Kernel.\n", + "\n", + "The code below pulls the most recent papers from [ArviX](https://arxiv.org/), creates embeddings from the paper abstracts, and stores them in a Postgres database.\n", + "\n", + "In the future, we can use the Postgres vector store to search the database for similar papers based on the embeddings - stay 
tuned!" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import textwrap\n", + "import xml.etree.ElementTree as ET\n", + "from dataclasses import dataclass\n", + "from datetime import datetime\n", + "from typing import Annotated, Any\n", + "\n", + "import numpy as np\n", + "import requests\n", + "\n", + "from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import (\n", + " OpenAIEmbeddingPromptExecutionSettings,\n", + ")\n", + "from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding\n", + "from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding\n", + "from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection\n", + "from semantic_kernel.data.const import DistanceFunction, IndexKind\n", + "from semantic_kernel.data.vector_store_model_decorator import vectorstoremodel\n", + "from semantic_kernel.data.vector_store_record_fields import (\n", + " VectorStoreRecordDataField,\n", + " VectorStoreRecordKeyField,\n", + " VectorStoreRecordVectorField,\n", + ")\n", + "from semantic_kernel.data.vector_store_record_utils import VectorStoreRecordUtils\n", + "from semantic_kernel.kernel import Kernel" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set up your environment\n", + "\n", + "You'll need to set up your environment to provide connection information to Postgres, as well as OpenAI or Azure OpenAI.\n", + "\n", + "To do this, copy the `.env.example` file to `.env` and fill in the necessary information.\n", + "\n", + "### Postgres configuration\n", + "\n", + "You'll need to provide a connection string to a Postgres database. You can use a local Postgres instance, or a cloud-hosted one.\n", + "You can provide a connection string, or provide environment variables with the connection information. 
See the .env.example file for `POSTGRES_` settings.\n", + "\n", + "#### Using Docker\n", + "\n", + "You can also use docker to bring up a Postgres instance by following the steps below:\n", + "\n", + "Create an `init.sql` that has the following:\n", + "\n", + "```sql\n", + "CREATE EXTENSION IF NOT EXISTS vector;\n", + "```\n", + "\n", + "Now you can start a postgres instance with the following:\n", + "\n", + "```\n", + "docker pull pgvector/pgvector:pg16\n", + "docker run --rm -it --name pgvector -p 5432:5432 -v ./init.sql:/docker-entrypoint-initdb.d/init.sql -e POSTGRES_PASSWORD=example pgvector/pgvector:pg16\n", + "```\n", + "\n", + "_Note_: Use `.\\init.sql` on Windows and `./init.sql` on WSL or Linux/Mac.\n", + "\n", + "Then you could use the connection string:\n", + "\n", + "```\n", + "POSTGRES_CONNECTION_STRING=\"host=localhost port=5432 dbname=postgres user=postgres password=example\"\n", + "```\n", + "\n", + "### OpenAI configuration\n", + "\n", + "You can either use OpenAI or Azure OpenAI APIs. You provide the API key and other configuration in the `.env` file. Set either the `OPENAI_` or `AZURE_OPENAI_` settings.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# Path to the environment file\n", + "env_file_path = \".env\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we set some additional configuration." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# -- ArXiv settings --\n", + "\n", + "# The search term to use when searching for papers on arXiv. All metadata fields for the papers are searched.\n", + "SEARCH_TERM = \"generative ai\"\n", + "\n", + "# The category of papers to search for on arXiv. 
See https://arxiv.org/category_taxonomy for a list of categories.\n", + "ARVIX_CATEGORY = \"cs.AI\"\n", + "\n", + "# The maximum number of papers to search for on arXiv.\n", + "MAX_RESULTS = 10\n", + "\n", + "# -- OpenAI settings --\n", + "\n", + "# Set this flag to False to use the OpenAI API instead of Azure OpenAI\n", + "USE_AZURE_OPENAI = True\n", + "\n", + "# The name of the OpenAI model or Azure OpenAI deployment to use\n", + "EMBEDDING_MODEL = \"text-embedding-3-small\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we define a vector store model. This model defines the table and column names for storing the embeddings. We use the `@vectorstoremodel` decorator to tell Semantic Kernel to create a vector store definition from the model. The VectorStoreRecordField annotations define the fields that will be stored in the database, including key and vector fields." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "@vectorstoremodel\n", + "@dataclass\n", + "class ArxivPaper:\n", + " id: Annotated[str, VectorStoreRecordKeyField()]\n", + " title: Annotated[str, VectorStoreRecordDataField()]\n", + " abstract: Annotated[str, VectorStoreRecordDataField(has_embedding=True, embedding_property_name=\"abstract_vector\")]\n", + " published: Annotated[datetime, VectorStoreRecordDataField()]\n", + " authors: Annotated[list[str], VectorStoreRecordDataField()]\n", + " link: Annotated[str | None, VectorStoreRecordDataField()]\n", + "\n", + " abstract_vector: Annotated[\n", + " np.ndarray | None,\n", + " VectorStoreRecordVectorField(\n", + " embedding_settings={\"embedding\": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)},\n", + " index_kind=IndexKind.HNSW,\n", + " dimensions=1536,\n", + " distance_function=DistanceFunction.COSINE,\n", + " property_type=\"float\",\n", + " serialize_function=np.ndarray.tolist,\n", + " deserialize_function=np.array,\n", + " ),\n", + " ] = 
None\n", + "\n", + " @classmethod\n", + " def from_arxiv_info(cls, arxiv_info: dict[str, Any]) -> \"ArxivPaper\":\n", + " return cls(\n", + " id=arxiv_info[\"id\"],\n", + " title=arxiv_info[\"title\"].replace(\"\\n \", \" \"),\n", + " abstract=arxiv_info[\"abstract\"].replace(\"\\n \", \" \"),\n", + " published=arxiv_info[\"published\"],\n", + " authors=arxiv_info[\"authors\"],\n", + " link=arxiv_info[\"link\"],\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Below is a function that queries the ArviX API for the most recent papers based on our search query and category." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def query_arxiv(search_query: str, category: str = \"cs.AI\", max_results: int = 10) -> list[dict[str, Any]]:\n", + " \"\"\"\n", + " Query the ArXiv API and return a list of dictionaries with relevant metadata for each paper.\n", + "\n", + " Args:\n", + " search_query: The search term or topic to query for.\n", + " category: The category to restrict the search to (default is \"cs.AI\").\n", + " See https://arxiv.org/category_taxonomy for a list of categories.\n", + " max_results: Maximum number of results to retrieve (default is 10).\n", + " \"\"\"\n", + " response = requests.get(\n", + " \"http://export.arxiv.org/api/query?\"\n", + " f\"search_query=all:%22{search_query.replace(' ', '+')}%22\"\n", + " f\"+AND+cat:{category}&start=0&max_results={max_results}&sortBy=lastUpdatedDate&sortOrder=descending\"\n", + " )\n", + "\n", + " root = ET.fromstring(response.content)\n", + " ns = {\"atom\": \"http://www.w3.org/2005/Atom\"}\n", + "\n", + " return [\n", + " {\n", + " \"id\": entry.find(\"atom:id\", ns).text.split(\"/\")[-1],\n", + " \"title\": entry.find(\"atom:title\", ns).text,\n", + " \"abstract\": entry.find(\"atom:summary\", ns).text,\n", + " \"published\": entry.find(\"atom:published\", ns).text,\n", + " \"link\": entry.find(\"atom:id\", 
ns).text,\n", + " \"authors\": [author.find(\"atom:name\", ns).text for author in entry.findall(\"atom:author\", ns)],\n", + " \"categories\": [category.get(\"term\") for category in entry.findall(\"atom:category\", ns)],\n", + " \"pdf_link\": next(\n", + " (link_tag.get(\"href\") for link_tag in entry.findall(\"atom:link\", ns) if link_tag.get(\"title\") == \"pdf\"),\n", + " None,\n", + " ),\n", + " }\n", + " for entry in root.findall(\"atom:entry\", ns)\n", + " ]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We use this function to query papers and store them in memory as our model types." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "arxiv_papers: list[ArxivPaper] = [\n", + " ArxivPaper.from_arxiv_info(paper)\n", + " for paper in query_arxiv(SEARCH_TERM, category=ARVIX_CATEGORY, max_results=MAX_RESULTS)\n", + "]\n", + "\n", + "print(f\"Found {len(arxiv_papers)} papers on '{SEARCH_TERM}'\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a `PostgresCollection`, which represents the table in Postgres where we will store the paper information and embeddings." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "collection = PostgresCollection[str, ArxivPaper](\n", + " collection_name=\"arxiv_papers\", data_model_type=ArxivPaper, env_file_path=env_file_path\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a Kernel and add the TextEmbedding service, which will be used to generate embeddings of the abstract for each paper." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "kernel = Kernel()\n", + "if USE_AZURE_OPENAI:\n", + " text_embedding = AzureTextEmbedding(\n", + " service_id=\"embedding\", deployment_name=EMBEDDING_MODEL, env_file_path=env_file_path\n", + " )\n", + "else:\n", + " text_embedding = OpenAITextEmbedding(\n", + " service_id=\"embedding\", ai_model_id=EMBEDDING_MODEL, env_file_path=env_file_path\n", + " )\n", + "\n", + "kernel.add_service(text_embedding)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we use VectorStoreRecordUtils to add embeddings to our models." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "records = await VectorStoreRecordUtils(kernel).add_vector_to_records(arxiv_papers, data_model_type=ArxivPaper)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now that the models have embeddings, we can write them into the Postgres database." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "async with collection:\n", + " await collection.create_collection_if_not_exists()\n", + " keys = await collection.upsert_batch(records)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we retrieve the first few models from the database and print out their information." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "async with collection:\n", + " results = await collection.get_batch(keys[:3])\n", + " if results:\n", + " for result in results:\n", + " print(f\"# {result.title}\")\n", + " print()\n", + " wrapped_abstract = textwrap.fill(result.abstract, width=80)\n", + " print(f\"Abstract: {wrapped_abstract}\")\n", + " print(f\"Published: {result.published}\")\n", + " print(f\"Link: {result.link}\")\n", + " print(f\"PDF Link: {result.link}\")\n", + " print(f\"Authors: {', '.join(result.authors)}\")\n", + " print(f\"Embedding: {result.abstract_vector}\")\n", + " print()\n", + " print()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "...searching Postgres memory coming soon, to be continued!" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.15" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/python/samples/getting_started/third_party/weaviate-persistent-memory.ipynb b/python/samples/getting_started/third_party/weaviate-persistent-memory.ipynb index ed15f5ab82cc..a9b2635e5442 100644 --- a/python/samples/getting_started/third_party/weaviate-persistent-memory.ipynb +++ b/python/samples/getting_started/third_party/weaviate-persistent-memory.ipynb @@ -156,7 +156,11 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install semantic-kernel[weaviate]==1.6.0" + "# Note: if using a virtual environment, do not run this cell\n", + "%pip install -U semantic-kernel[weaviate]\n", + "from semantic_kernel import __version__\n", + "\n", + "__version__" ] }, { diff --git 
a/python/samples/getting_started_with_agents/README.md b/python/samples/getting_started_with_agents/README.md index 3d8f471c7967..6028db397406 100644 --- a/python/samples/getting_started_with_agents/README.md +++ b/python/samples/getting_started_with_agents/README.md @@ -6,6 +6,7 @@ This project contains a step by step guide to get started with _Semantic Kernel - For the use of Chat Completion agents, the minimum allowed Semantic Kernel pypi version is 1.3.0. - For the use of OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.4.0. - For the use of Agent Group Chat, the minimum allowed Semantic kernel pypi version is 1.6.0. +- For the use of Streaming OpenAI Assistant agents, the minimum allowed Semantic Kernel pypi version is 1.11.0 #### Source diff --git a/python/samples/getting_started_with_agents/step1_agent.py b/python/samples/getting_started_with_agents/step1_agent.py index e8d1e93882a1..28d19a45df1f 100644 --- a/python/samples/getting_started_with_agents/step1_agent.py +++ b/python/samples/getting_started_with_agents/step1_agent.py @@ -37,7 +37,7 @@ async def invoke_agent(agent: ChatCompletionAgent, input: str, chat: ChatHistory contents.append(content) streaming_chat_message = reduce(lambda first, second: first + second, contents) print(f"# {content.role} - {content_name or '*'}: '{streaming_chat_message}'") - chat.add_message(content) + chat.add_message(streaming_chat_message) else: async for content in agent.invoke(chat): print(f"# {content.role} - {content.name or '*'}: '{content.content}'") diff --git a/python/samples/getting_started_with_agents/step2_plugins.py b/python/samples/getting_started_with_agents/step2_plugins.py index 134e85f1ffd2..53772408211d 100644 --- a/python/samples/getting_started_with_agents/step2_plugins.py +++ b/python/samples/getting_started_with_agents/step2_plugins.py @@ -70,7 +70,6 @@ async def main(): # Create the instance of the Kernel kernel = Kernel() - # Add the OpenAIChatCompletion AI Service to 
the Kernel service_id = "agent" kernel.add_service(AzureChatCompletion(service_id=service_id)) @@ -78,7 +77,7 @@ async def main(): # Configure the function choice behavior to auto invoke kernel functions settings.function_choice_behavior = FunctionChoiceBehavior.Auto() - kernel.add_plugin(plugin=MenuPlugin(), plugin_name="menu") + kernel.add_plugin(MenuPlugin(), plugin_name="menu") # Create the agent agent = ChatCompletionAgent( diff --git a/python/samples/getting_started_with_agents/step7_assistant.py b/python/samples/getting_started_with_agents/step7_assistant.py index 6f1242f62ccc..67235c0dcf3c 100644 --- a/python/samples/getting_started_with_agents/step7_assistant.py +++ b/python/samples/getting_started_with_agents/step7_assistant.py @@ -20,7 +20,7 @@ HOST_INSTRUCTIONS = "Answer questions about the menu." # Note: you may toggle this to switch between AzureOpenAI and OpenAI -use_azure_openai = True +use_azure_openai = False # Define a sample plugin for the sample diff --git a/python/samples/getting_started_with_agents/step8_assistant_vision.py b/python/samples/getting_started_with_agents/step8_assistant_vision.py index 62a796cd9f52..ac7bf34d7e48 100644 --- a/python/samples/getting_started_with_agents/step8_assistant_vision.py +++ b/python/samples/getting_started_with_agents/step8_assistant_vision.py @@ -35,6 +35,9 @@ def create_message_with_image_reference(input: str, file_id: str) -> ChatMessage ) +streaming = False + + # A helper method to invoke the agent with the user input async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, message: ChatMessageContent) -> None: """Invoke the agent with the user input.""" @@ -42,9 +45,19 @@ async def invoke_agent(agent: OpenAIAssistantAgent, thread_id: str, message: Cha print(f"# {AuthorRole.USER}: '{message.items[0].text}'") - async for content in agent.invoke(thread_id=thread_id): - if content.role != AuthorRole.TOOL: - print(f"# {content.role}: {content.content}") + if streaming: + first_chunk = True + 
async for content in agent.invoke_stream(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + if first_chunk: + print(f"# {content.role}: ", end="", flush=True) + first_chunk = False + print(content.content, end="", flush=True) + print() + else: + async for content in agent.invoke(thread_id=thread_id): + if content.role != AuthorRole.TOOL: + print(f"# {content.role}: {content.content}") async def main(): diff --git a/python/samples/learn_resources/agent_docs/agent_collaboration.py b/python/samples/learn_resources/agent_docs/agent_collaboration.py new file mode 100644 index 000000000000..c8e7d04bffb6 --- /dev/null +++ b/python/samples/learn_resources/agent_docs/agent_collaboration.py @@ -0,0 +1,173 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents import AgentGroupChat, ChatCompletionAgent +from semantic_kernel.agents.strategies.selection.kernel_function_selection_strategy import ( + KernelFunctionSelectionStrategy, +) +from semantic_kernel.agents.strategies.termination.kernel_function_termination_strategy import ( + KernelFunctionTerminationStrategy, +) +from semantic_kernel.agents.strategies.termination.termination_strategy import TerminationStrategy +from semantic_kernel.connectors.ai.open_ai.services.azure_chat_completion import AzureChatCompletion +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.functions.kernel_function_from_prompt import KernelFunctionFromPrompt +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# agent group chat that utilizes a Reviewer Chat Completion # +# Agent along with a Writer Chat Completion Agent to # +# complete a user's task. 
# +################################################################### + + +class ApprovalTerminationStrategy(TerminationStrategy): + """A strategy for determining when an agent should terminate.""" + + async def should_agent_terminate(self, agent, history): + """Check if the agent should terminate.""" + return "approved" in history[-1].content.lower() + + +REVIEWER_NAME = "Reviewer" +COPYWRITER_NAME = "Writer" + + +def _create_kernel_with_chat_completion(service_id: str) -> Kernel: + kernel = Kernel() + kernel.add_service(AzureChatCompletion(service_id=service_id)) + return kernel + + +async def main(): + agent_reviewer = ChatCompletionAgent( + service_id=REVIEWER_NAME, + kernel=_create_kernel_with_chat_completion(REVIEWER_NAME), + name=REVIEWER_NAME, + instructions=""" + Your responsibility is to review and identify how to improve user provided content. + If the user has providing input or direction for content already provided, specify how to + address this input. + Never directly perform the correction or provide example. + Once the content has been updated in a subsequent response, you will review the content + again until satisfactory. + Always copy satisfactory content to the clipboard using available tools and inform user. + + RULES: + - Only identify suggestions that are specific and actionable. + - Verify previous suggestions have been addressed. + - Never repeat previous suggestions. + """, + ) + + agent_writer = ChatCompletionAgent( + service_id=COPYWRITER_NAME, + kernel=_create_kernel_with_chat_completion(COPYWRITER_NAME), + name=COPYWRITER_NAME, + instructions=""" + Your sole responsibility is to rewrite content according to review suggestions. + + - Always apply all review direction. + - Always revise the content in its entirety without explanation. + - Never address the user. 
+ """, + ) + + selection_function = KernelFunctionFromPrompt( + function_name="selection", + prompt=f""" + Determine which participant takes the next turn in a conversation based on the the most recent participant. + State only the name of the participant to take the next turn. + No participant should take more than one turn in a row. + + Choose only from these participants: + - {REVIEWER_NAME} + - {COPYWRITER_NAME} + + Always follow these rules when selecting the next participant: + - After user input, it is {COPYWRITER_NAME}'s turn. + - After {COPYWRITER_NAME} replies, it is {REVIEWER_NAME}'s turn. + - After {REVIEWER_NAME} provides feedback, it is {COPYWRITER_NAME}'s turn. + + History: + {{{{$history}}}} + """, + ) + + TERMINATION_KEYWORD = "yes" + + termination_function = KernelFunctionFromPrompt( + function_name="termination", + prompt=f""" + Examine the RESPONSE and determine whether the content has been deemed satisfactory. + If content is satisfactory, respond with a single word without explanation: {TERMINATION_KEYWORD}. + If specific suggestions are being provided, it is not satisfactory. + If no correction is suggested, it is satisfactory. 
+ + RESPONSE: + {{{{$history}}}} + """, + ) + + chat = AgentGroupChat( + agents=[agent_writer, agent_reviewer], + selection_strategy=KernelFunctionSelectionStrategy( + function=selection_function, + kernel=_create_kernel_with_chat_completion("selection"), + result_parser=lambda result: str(result.value[0]) if result.value is not None else COPYWRITER_NAME, + agent_variable_name="agents", + history_variable_name="history", + ), + termination_strategy=KernelFunctionTerminationStrategy( + agents=[agent_reviewer], + function=termination_function, + kernel=_create_kernel_with_chat_completion("termination"), + result_parser=lambda result: TERMINATION_KEYWORD in str(result.value[0]).lower(), + history_variable_name="history", + maximum_iterations=10, + ), + ) + + is_complete: bool = False + while not is_complete: + user_input = input("User:> ") + if not user_input: + continue + + if user_input.lower() == "exit": + is_complete = True + break + + if user_input.lower() == "reset": + await chat.reset() + print("[Conversation has been reset]") + continue + + if user_input.startswith("@") and len(user_input) > 1: + file_path = user_input[1:] + try: + if not os.path.exists(file_path): + print(f"Unable to access file: {file_path}") + continue + with open(file_path) as file: + user_input = file.read() + except Exception: + print(f"Unable to access file: {file_path}") + continue + + await chat.add_chat_message(ChatMessageContent(role=AuthorRole.USER, content=user_input)) + + async for response in chat.invoke(): + print(f"# {response.role} - {response.name or '*'}: '{response.content}'") + + if chat.is_complete: + is_complete = True + break + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/learn_resources/agent_docs/assistant_code.py b/python/samples/learn_resources/agent_docs/assistant_code.py new file mode 100644 index 000000000000..06a7bc8b29fd --- /dev/null +++ b/python/samples/learn_resources/agent_docs/assistant_code.py @@ -0,0 +1,118 @@ +# Copyright (c)
Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# OpenAI assistant agent that utilizes the code interpreter # +# to analyze uploaded files. # +################################################################### + +# Let's form the file paths that we will later pass to the assistant +csv_file_path_1 = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "PopulationByAdmin1.csv", +) + +csv_file_path_2 = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), + "PopulationByCountry.csv", +) + + +async def download_file_content(agent, file_id: str): + try: + # Fetch the content of the file using the provided method + response_content = await agent.client.files.content(file_id) + + # Get the current working directory of the file + current_directory = os.path.dirname(os.path.abspath(__file__)) + + # Define the path to save the image in the current directory + file_path = os.path.join( + current_directory, # Use the current directory of the file + f"{file_id}.png", # You can modify this to use the actual filename with proper extension + ) + + # Save content to a file asynchronously + with open(file_path, "wb") as file: + file.write(response_content.content) + + print(f"File saved to: {file_path}") + except Exception as e: + print(f"An error occurred while downloading file {file_id}: {str(e)}") + + +async def download_response_image(agent, file_ids: list[str]): + if file_ids: + # Iterate over file_ids and 
download each one + for file_id in file_ids: + await download_file_content(agent, file_id) + + +async def main(): + agent = await AzureAssistantAgent.create( + kernel=Kernel(), + service_id="agent", + name="SampleAssistantAgent", + instructions=""" + Analyze the available data to provide an answer to the user's question. + Always format response using markdown. + Always include a numerical index that starts at 1 for any lists or tables. + Always sort lists in ascending order. + """, + enable_code_interpreter=True, + code_interpreter_filenames=[csv_file_path_1, csv_file_path_2], + ) + + print("Creating thread...") + thread_id = await agent.create_thread() + + try: + is_complete: bool = False + file_ids: list[str] = [] + while not is_complete: + user_input = input("User:> ") + if not user_input: + continue + + if user_input.lower() == "exit": + is_complete = True + + await agent.add_chat_message( + thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) + ) + is_code: bool = False + async for response in agent.invoke_stream(thread_id=thread_id): + if is_code != response.metadata.get("code"): + print() + is_code = not is_code + + print(f"{response.content}", end="", flush=True) + + file_ids.extend([ + item.file_id for item in response.items if isinstance(item, StreamingFileReferenceContent) + ]) + + print() + + await download_response_image(agent, file_ids) + file_ids.clear() + + finally: + print("Cleaning up resources...") + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.code_interpreter_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/learn_resources/agent_docs/assistant_search.py b/python/samples/learn_resources/agent_docs/assistant_search.py new file mode 100644 index 000000000000..5d91786e9bc4 --- /dev/null +++ b/python/samples/learn_resources/agent_docs/assistant_search.py @@ -0,0 +1,87 @@ +# 
Copyright (c) Microsoft. All rights reserved. + +import asyncio +import os + +from semantic_kernel.agents.open_ai.azure_assistant_agent import AzureAssistantAgent +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +################################################################### +# The following sample demonstrates how to create a simple, # +# OpenAI assistant agent that utilizes the vector store # +# to answer questions based on the uploaded documents. # +################################################################### + + +def get_filepath_for_filename(filename: str) -> str: + base_directory = os.path.dirname(os.path.realpath(__file__)) + return os.path.join(base_directory, filename) + + +filenames = [ + "Grimms-The-King-of-the-Golden-Mountain.txt", + "Grimms-The-Water-of-Life.txt", + "Grimms-The-White-Snake.txt", +] + + +async def main(): + agent = await AzureAssistantAgent.create( + kernel=Kernel(), + service_id="agent", + name="SampleAssistantAgent", + instructions=""" + The document store contains the text of fictional stories. + Always analyze the document store to provide an answer to the user's question. + Never rely on your knowledge of stories not included in the document store. + Always format response using markdown. 
+ """, + enable_file_search=True, + vector_store_filenames=[get_filepath_for_filename(filename) for filename in filenames], + ) + + print("Creating thread...") + thread_id = await agent.create_thread() + + try: + is_complete: bool = False + while not is_complete: + user_input = input("User:> ") + if not user_input: + continue + + if user_input.lower() == "exit": + is_complete = True + + await agent.add_chat_message( + thread_id=thread_id, message=ChatMessageContent(role=AuthorRole.USER, content=user_input) + ) + + footnotes: list[StreamingAnnotationContent] = [] + async for response in agent.invoke_stream(thread_id=thread_id): + footnotes.extend([item for item in response.items if isinstance(item, StreamingAnnotationContent)]) + + print(f"{response.content}", end="", flush=True) + + print() + + if len(footnotes) > 0: + for footnote in footnotes: + print( + f"\n`{footnote.quote}` => {footnote.file_id} " + f"(Index: {footnote.start_index} - {footnote.end_index})" + ) + + finally: + print("Cleaning up resources...") + if agent is not None: + [await agent.delete_file(file_id) for file_id in agent.file_search_file_ids] + await agent.delete_thread(thread_id) + await agent.delete() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/learn_resources/agent_docs/chat_agent.py b/python/samples/learn_resources/agent_docs/chat_agent.py new file mode 100644 index 000000000000..56429d5974cb --- /dev/null +++ b/python/samples/learn_resources/agent_docs/chat_agent.py @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio +import os +import sys +from datetime import datetime + +from semantic_kernel.agents import ChatCompletionAgent +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.kernel import Kernel + +# Adjust the sys.path so we can use the GitHubPlugin and GitHubSettings classes +# This is so we can run the code from the samples/learn_resources/agent_docs directory +# If you are running code from your own project, you may not need need to do this. +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +from plugins.GithubPlugin.github import GitHubPlugin, GitHubSettings # noqa: E402 + +################################################################### +# The following sample demonstrates how to create a simple, # +# ChatCompletionAgent to use a GitHub plugin to interact # +# with the GitHub API. 
# +################################################################### + + +async def main(): + kernel = Kernel() + + # Add the AzureChatCompletion AI Service to the Kernel + service_id = "agent" + kernel.add_service(AzureChatCompletion(service_id=service_id)) + + settings = kernel.get_prompt_execution_settings_from_service_id(service_id=service_id) + # Configure the function choice behavior to auto invoke kernel functions + settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + # Set your GitHub Personal Access Token (PAT) value here + gh_settings = GitHubSettings(token="") # nosec + kernel.add_plugin(plugin=GitHubPlugin(gh_settings), plugin_name="GithubPlugin") + + current_time = datetime.now().isoformat() + + # Create the agent + agent = ChatCompletionAgent( + service_id="agent", + kernel=kernel, + name="SampleAssistantAgent", + instructions=f""" + You are an agent designed to query and retrieve information from a single GitHub repository in a read-only + manner. + You are also able to access the profile of the active user. + + Use the current date and time to provide up-to-date details or time-sensitive responses. + + The repository you are querying is a public repository with the following name: microsoft/semantic-kernel + + The current date and time is: {current_time}. 
+ """, + execution_settings=settings, + ) + + history = ChatHistory() + is_complete: bool = False + while not is_complete: + user_input = input("User:> ") + if not user_input: + continue + + if user_input.lower() == "exit": + is_complete = True + break + + history.add_message(ChatMessageContent(role=AuthorRole.USER, content=user_input)) + + async for response in agent.invoke(history=history): + print(f"{response.content}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/learn_resources/plugins/GithubPlugin/github.py b/python/samples/learn_resources/plugins/GithubPlugin/github.py new file mode 100644 index 000000000000..ae3a32a5227e --- /dev/null +++ b/python/samples/learn_resources/plugins/GithubPlugin/github.py @@ -0,0 +1,118 @@ +# Copyright (c) Microsoft. All rights reserved. + + +import httpx +from pydantic import BaseModel, Field + +from semantic_kernel.functions.kernel_function_decorator import kernel_function + +# region GitHub Models + + +class Repo(BaseModel): + id: int = Field(..., alias="id") + name: str = Field(..., alias="full_name") + description: str | None = Field(None, alias="description") + url: str = Field(..., alias="html_url") + + +class User(BaseModel): + id: int = Field(..., alias="id") + login: str = Field(..., alias="login") + name: str | None = Field(None, alias="name") + company: str | None = Field(None, alias="company") + url: str = Field(..., alias="html_url") + + +class Label(BaseModel): + id: int = Field(..., alias="id") + name: str = Field(..., alias="name") + description: str | None = Field(None, alias="description") + + +class Issue(BaseModel): + id: int = Field(..., alias="id") + number: int = Field(..., alias="number") + url: str = Field(..., alias="html_url") + title: str = Field(..., alias="title") + state: str = Field(..., alias="state") + labels: list[Label] = Field(..., alias="labels") + when_created: str | None = Field(None, alias="created_at") + when_closed: str | None = Field(None, 
alias="closed_at") + + +class IssueDetail(Issue): + body: str | None = Field(None, alias="body") + + +# endregion + + +class GitHubSettings(BaseModel): + base_url: str = "https://api.github.com" + token: str + + +class GitHubPlugin: + def __init__(self, settings: GitHubSettings): + self.settings = settings + + @kernel_function + async def get_user_profile(self) -> "User": + async with self.create_client() as client: + response = await self.make_request(client, "/user") + return User(**response) + + @kernel_function + async def get_repository(self, organization: str, repo: str) -> "Repo": + async with self.create_client() as client: + response = await self.make_request(client, f"/repos/{organization}/{repo}") + return Repo(**response) + + @kernel_function + async def get_issues( + self, + organization: str, + repo: str, + max_results: int | None = None, + state: str = "", + label: str = "", + assignee: str = "", + ) -> list["Issue"]: + async with self.create_client() as client: + path = f"/repos/{organization}/{repo}/issues?" 
+ path = self.build_query(path, "state", state) + path = self.build_query(path, "assignee", assignee) + path = self.build_query(path, "labels", label) + path = self.build_query(path, "per_page", str(max_results) if max_results else "") + response = await self.make_request(client, path) + return [Issue(**issue) for issue in response] + + @kernel_function + async def get_issue_detail(self, organization: str, repo: str, issue_id: int) -> "IssueDetail": + async with self.create_client() as client: + path = f"/repos/{organization}/{repo}/issues/{issue_id}" + response = await self.make_request(client, path) + return IssueDetail(**response) + + def create_client(self) -> httpx.AsyncClient: + headers = { + "User-Agent": "request", + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {self.settings.token}", + "X-GitHub-Api-Version": "2022-11-28", + } + return httpx.AsyncClient(base_url=self.settings.base_url, headers=headers) + + @staticmethod + def build_query(path: str, key: str, value: str) -> str: + if value: + return f"{path}{key}={value}&" + return path + + @staticmethod + async def make_request(client: httpx.AsyncClient, path: str) -> dict: + print(f"REQUEST: {path}\n") + response = await client.get(path) + response.raise_for_status() + return response.json() diff --git a/python/samples/learn_resources/resources/Grimms-The-King-of-the-Golden-Mountain.txt b/python/samples/learn_resources/resources/Grimms-The-King-of-the-Golden-Mountain.txt new file mode 100644 index 000000000000..a5279d6e8c51 --- /dev/null +++ b/python/samples/learn_resources/resources/Grimms-The-King-of-the-Golden-Mountain.txt @@ -0,0 +1,36 @@ +The King of the Golden Mountain +By the Grimm Brothers + +There was once a merchant who had only one child, a son, that was very young, and barely able to run alone. 
He had two richly laden ships then making a voyage upon the seas, in which he had embarked all his wealth, in the hope of making great gains, when the news came that both were lost. Thus from being a rich man he became all at once so very poor that nothing was left to him but one small plot of land; and there he often went in an evening to take his walk, and ease his mind of a little of his trouble. + +One day, as he was roaming along in a brown study, thinking with no great comfort on what he had been and what he now was, and was like to be, all on a sudden there stood before him a little, rough-looking, black dwarf. โ€™Prithee, friend, why so sorrowful?โ€™ said he to the merchant; โ€™what is it you take so deeply to heart?โ€™ โ€™If you would do me any good I would willingly tell you,โ€™ said the merchant. โ€™Who knows but I may?โ€™ said the little man: โ€™tell me what ails you, and perhaps you will find I may be of some use.โ€™ Then the merchant told him how all his wealth was gone to the bottom of the sea, and how he had nothing left but that little plot of land. โ€™Oh, trouble not yourself about that,โ€™ said the dwarf; โ€™only undertake to bring me here, twelve years hence, whatever meets you first on your going home, and I will give you as much as you please.โ€™ The merchant thought this was no great thing to ask; that it would most likely be his dog or his cat, or something of that sort, but forgot his little boy Heinel; so he agreed to the bargain, and signed and sealed the bond to do what was asked of him. + +But as he drew near home, his little boy was so glad to see him that he crept behind him, and laid fast hold of his legs, and looked up in his face and laughed. 
Then the father started, trembling with fear and horror, and saw what it was that he had bound himself to do; but as no gold was come, he made himself easy by thinking that it was only a joke that the dwarf was playing him, and that, at any rate, when the money came, he should see the bearer, and would not take it in. + +About a month afterwards he went upstairs into a lumber-room to look for some old iron, that he might sell it and raise a little money; and there, instead of his iron, he saw a large pile of gold lying on the floor. At the sight of this he was overjoyed, and forgetting all about his son, went into trade again, and became a richer merchant than before. + +Meantime little Heinel grew up, and as the end of the twelve years drew near the merchant began to call to mind his bond, and became very sad and thoughtful; so that care and sorrow were written upon his face. The boy one day asked what was the matter, but his father would not tell for some time; at last, however, he said that he had, without knowing it, sold him for gold to a little, ugly-looking, black dwarf, and that the twelve years were coming round when he must keep his word. Then Heinel said, โ€™Father, give yourself very little trouble about that; I shall be too much for the little man.โ€™ + +When the time came, the father and son went out together to the place agreed upon: and the son drew a circle on the ground, and set himself and his father in the middle of it. The little black dwarf soon came, and walked round and round about the circle, but could not find any way to get into it, and he either could not, or dared not, jump over it. At last the boy said to him. โ€™Have you anything to say to us, my friend, or what do you want?โ€™ Now Heinel had found a friend in a good fairy, that was fond of him, and had told him what to do; for this fairy knew what good luck was in store for him. โ€™Have you brought me what you said you would?โ€™ said the dwarf to the merchant. 
The old man held his tongue, but Heinel said again, โ€™What do you want here?โ€™ The dwarf said, โ€™I come to talk with your father, not with you.โ€™ โ€™You have cheated and taken in my father,โ€™ said the son; โ€™pray give him up his bond at once.โ€™ โ€™Fair and softly,โ€™ said the little old man; โ€™right is right; I have paid my money, and your father has had it, and spent it; so be so good as to let me have what I paid it for.โ€™ โ€™You must have my consent to that first,โ€™ said Heinel, โ€™so please to step in here, and let us talk it over.โ€™ The old man grinned, and showed his teeth, as if he should have been very glad to get into the circle if he could. Then at last, after a long talk, they came to terms. Heinel agreed that his father must give him up, and that so far the dwarf should have his way: but, on the other hand, the fairy had told Heinel what fortune was in store for him, if he followed his own course; and he did not choose to be given up to his hump-backed friend, who seemed so anxious for his company. + +So, to make a sort of drawn battle of the matter, it was settled that Heinel should be put into an open boat, that lay on the sea-shore hard by; that the father should push him off with his own hand, and that he should thus be set adrift, and left to the bad or good luck of wind and weather. Then he took leave of his father, and set himself in the boat, but before it got far off a wave struck it, and it fell with one side low in the water, so the merchant thought that poor Heinel was lost, and went home very sorrowful, while the dwarf went his way, thinking that at any rate he had had his revenge. + +The boat, however, did not sink, for the good fairy took care of her friend, and soon raised the boat up again, and it went safely on. The young man sat safe within, till at length it ran ashore upon an unknown land. As he jumped upon the shore he saw before him a beautiful castle but empty and dreary within, for it was enchanted. 
โ€™Here,โ€™ said he to himself, โ€™must I find the prize the good fairy told me of.โ€™ So he once more searched the whole palace through, till at last he found a white snake, lying coiled up on a cushion in one of the chambers. + +Now the white snake was an enchanted princess; and she was very glad to see him, and said, โ€™Are you at last come to set me free? Twelve long years have I waited here for the fairy to bring you hither as she promised, for you alone can save me. This night twelve men will come: their faces will be black, and they will be dressed in chain armour. They will ask what you do here, but give no answer; and let them do what they willโ€“beat, whip, pinch, prick, or torment youโ€“bear all; only speak not a word, and at twelve oโ€™clock they must go away. The second night twelve others will come: and the third night twenty-four, who will even cut off your head; but at the twelfth hour of that night their power is gone, and I shall be free, and will come and bring you the Water of Life, and will wash you with it, and bring you back to life and health.โ€™ And all came to pass as she had said; Heinel bore all, and spoke not a word; and the third night the princess came, and fell on his neck and kissed him. Joy and gladness burst forth throughout the castle, the wedding was celebrated, and he was crowned king of the Golden Mountain. + +They lived together very happily, and the queen had a son. And thus eight years had passed over their heads, when the king thought of his father; and he began to long to see him once again. But the queen was against his going, and said, โ€™I know well that misfortunes will come upon us if you go.โ€™ However, he gave her no rest till she agreed. 
At his going away she gave him a wishing-ring, and said, โ€™Take this ring, and put it on your finger; whatever you wish it will bring you; only promise never to make use of it to bring me hence to your fatherโ€™s house.โ€™ Then he said he would do what she asked, and put the ring on his finger, and wished himself near the town where his father lived. + +Heinel found himself at the gates in a moment; but the guards would not let him go in, because he was so strangely clad. So he went up to a neighbouring hill, where a shepherd dwelt, and borrowed his old frock, and thus passed unknown into the town. When he came to his fatherโ€™s house, he said he was his son; but the merchant would not believe him, and said he had had but one son, his poor Heinel, who he knew was long since dead: and as he was only dressed like a poor shepherd, he would not even give him anything to eat. The king, however, still vowed that he was his son, and said, โ€™Is there no mark by which you would know me if I am really your son?โ€™ โ€™Yes,โ€™ said his mother, โ€™our Heinel had a mark like a raspberry on his right arm.โ€™ Then he showed them the mark, and they knew that what he had said was true. + +He next told them how he was king of the Golden Mountain, and was married to a princess, and had a son seven years old. But the merchant said, โ€™that can never be true; he must be a fine king truly who travels about in a shepherdโ€™s frock!โ€™ At this the son was vexed; and forgetting his word, turned his ring, and wished for his queen and son. In an instant they stood before him; but the queen wept, and said he had broken his word, and bad luck would follow. He did all he could to soothe her, and she at last seemed to be appeased; but she was not so in truth, and was only thinking how she should punish him. + +One day he took her to walk with him out of the town, and showed her the spot where the boat was set adrift upon the wide waters. 
Then he sat himself down, and said, โ€™I am very much tired; sit by me, I will rest my head in your lap, and sleep a while.โ€™ As soon as he had fallen asleep, however, she drew the ring from his finger, and crept softly away, and wished herself and her son at home in their kingdom. And when he awoke he found himself alone, and saw that the ring was gone from his finger. โ€™I can never go back to my fatherโ€™s house,โ€™ said he; โ€™they would say I am a sorcerer: I will journey forth into the world, till I come again to my kingdom.โ€™ + +So saying he set out and travelled till he came to a hill, where three giants were sharing their fatherโ€™s goods; and as they saw him pass they cried out and said, โ€™Little men have sharp wits; he shall part the goods between us.โ€™ Now there was a sword that cut off an enemyโ€™s head whenever the wearer gave the words, โ€™Heads off!โ€™; a cloak that made the owner invisible, or gave him any form he pleased; and a pair of boots that carried the wearer wherever he wished. Heinel said they must first let him try these wonderful things, then he might know how to set a value upon them. Then they gave him the cloak, and he wished himself a fly, and in a moment he was a fly. โ€™The cloak is very well,โ€™ said he: โ€™now give me the sword.โ€™ โ€™No,โ€™ said they; โ€™not unless you undertake not to say, โ€œHeads off!โ€ for if you do we are all dead men.โ€™ So they gave it him, charging him to try it on a tree. He next asked for the boots also; and the moment he had all three in his power, he wished himself at the Golden Mountain; and there he was at once. So the giants were left behind with no goods to share or quarrel about. + +As Heinel came near his castle he heard the sound of merry music; and the people around told him that his queen was about to marry another husband. Then he threw his cloak around him, and passed through the castle hall, and placed himself by the side of the queen, where no one saw him. 
But when anything to eat was put upon her plate, he took it away and ate it himself; and when a glass of wine was handed to her, he took it and drank it; and thus, though they kept on giving her meat and drink, her plate and cup were always empty. + +Upon this, fear and remorse came over her, and she went into her chamber alone, and sat there weeping; and he followed her there. โ€™Alas!โ€™ said she to herself, โ€™was I not once set free? Why then does this enchantment still seem to bind me?โ€™ + +โ€™False and fickle one!โ€™ said he. โ€™One indeed came who set thee free, and he is now near thee again; but how have you used him? Ought he to have had such treatment from thee?โ€™ Then he went out and sent away the company, and said the wedding was at an end, for that he was come back to the kingdom. But the princes, peers, and great men mocked at him. However, he would enter into no parley with them, but only asked them if they would go in peace or not. Then they turned upon him and tried to seize him; but he drew his sword. โ€™Heads Off!โ€™ cried he; and with the word the traitorsโ€™ heads fell before him, and Heinel was once more king of the Golden Mountain. \ No newline at end of file diff --git a/python/samples/learn_resources/resources/Grimms-The-Water-of-Life.txt b/python/samples/learn_resources/resources/Grimms-The-Water-of-Life.txt new file mode 100644 index 000000000000..3a5487beb5cc --- /dev/null +++ b/python/samples/learn_resources/resources/Grimms-The-Water-of-Life.txt @@ -0,0 +1,44 @@ +The Water of Life +By the Grimm Brothers + +Long before you or I were born, there reigned, in a country a great way off, a king who had three sons. This king once fell very illโ€“so ill that nobody thought he could live. His sons were very much grieved at their fatherโ€™s sickness; and as they were walking together very mournfully in the garden of the palace, a little old man met them and asked what was the matter. 
They told him that their father was very ill, and that they were afraid nothing could save him. ’I know what would,’ said the little old man; ’it is the Water of Life. If he could have a draught of it he would be well again; but it is very hard to get.’ Then the eldest son said, ’I will soon find it’: and he went to the sick king, and begged that he might go in search of the Water of Life, as it was the only thing that could save him. ’No,’ said the king. ’I had rather die than place you in such great danger as you must meet with in your journey.’ But he begged so hard that the king let him go; and the prince thought to himself, ’If I bring my father this water, he will make me sole heir to his kingdom.’ + +Then he set out: and when he had gone on his way some time he came to a deep valley, overhung with rocks and woods; and as he looked around, he saw standing above him on one of the rocks a little ugly dwarf, with a sugarloaf cap and a scarlet cloak; and the dwarf called to him and said, ’Prince, whither so fast?’ ’What is that to thee, you ugly imp?’ said the prince haughtily, and rode on. + +But the dwarf was enraged at his behaviour, and laid a fairy spell of ill-luck upon him; so that as he rode on the mountain pass became narrower and narrower, and at last the way was so straitened that he could not go a step forward: and when he thought to have turned his horse round and go back the way he came, he heard a loud laugh ringing round him, and found that the path was closed behind him, so that he was shut in all round. He next tried to get off his horse and make his way on foot, but again the laugh rang in his ears, and he found himself unable to move a step, and thus he was forced to abide spellbound. 
+ +Meantime the old king was lingering on in daily hope of his sonโ€™s return, till at last the second son said, โ€™Father, I will go in search of the Water of Life.โ€™ For he thought to himself, โ€™My brother is surely dead, and the kingdom will fall to me if I find the water.โ€™ The king was at first very unwilling to let him go, but at last yielded to his wish. So he set out and followed the same road which his brother had done, and met with the same elf, who stopped him at the same spot in the mountains, saying, as before, โ€™Prince, prince, whither so fast?โ€™ โ€™Mind your own affairs, busybody!โ€™ said the prince scornfully, and rode on. + +But the dwarf put the same spell upon him as he put on his elder brother, and he, too, was at last obliged to take up his abode in the heart of the mountains. Thus it is with proud silly people, who think themselves above everyone else, and are too proud to ask or take advice. + +When the second prince had thus been gone a long time, the youngest son said he would go and search for the Water of Life, and trusted he should soon be able to make his father well again. So he set out, and the dwarf met him too at the same spot in the valley, among the mountains, and said, โ€™Prince, whither so fast?โ€™ And the prince said, โ€™I am going in search of the Water of Life, because my father is ill, and like to die: can you help me? Pray be kind, and aid me if you can!โ€™ โ€™Do you know where it is to be found?โ€™ asked the dwarf. โ€™No,โ€™ said the prince, โ€™I do not. Pray tell me if you know.โ€™ โ€™Then as you have spoken to me kindly, and are wise enough to seek for advice, I will tell you how and where to go. 
The water you seek springs from a well in an enchanted castle; and, that you may be able to reach it in safety, I will give you an iron wand and two little loaves of bread; strike the iron door of the castle three times with the wand, and it will open: two hungry lions will be lying down inside gaping for their prey, but if you throw them the bread they will let you pass; then hasten on to the well, and take some of the Water of Life before the clock strikes twelve; for if you tarry longer the door will shut upon you for ever.โ€™ + +Then the prince thanked his little friend with the scarlet cloak for his friendly aid, and took the wand and the bread, and went travelling on and on, over sea and over land, till he came to his journeyโ€™s end, and found everything to be as the dwarf had told him. The door flew open at the third stroke of the wand, and when the lions were quieted he went on through the castle and came at length to a beautiful hall. Around it he saw several knights sitting in a trance; then he pulled off their rings and put them on his own fingers. In another room he saw on a table a sword and a loaf of bread, which he also took. Further on he came to a room where a beautiful young lady sat upon a couch; and she welcomed him joyfully, and said, if he would set her free from the spell that bound her, the kingdom should be his, if he would come back in a year and marry her. Then she told him that the well that held the Water of Life was in the palace gardens; and bade him make haste, and draw what he wanted before the clock struck twelve. + +He walked on; and as he walked through beautiful gardens he came to a delightful shady spot in which stood a couch; and he thought to himself, as he felt tired, that he would rest himself for a while, and gaze on the lovely scenes around him. So he laid himself down, and sleep fell upon him unawares, so that he did not wake up till the clock was striking a quarter to twelve. 
Then he sprang from the couch dreadfully frightened, ran to the well, filled a cup that was standing by him full of water, and hastened to get away in time. Just as he was going out of the iron door it struck twelve, and the door fell so quickly upon him that it snapped off a piece of his heel. + +When he found himself safe, he was overjoyed to think that he had got the Water of Life; and as he was going on his way homewards, he passed by the little dwarf, who, when he saw the sword and the loaf, said, โ€™You have made a noble prize; with the sword you can at a blow slay whole armies, and the bread will never fail you.โ€™ Then the prince thought to himself, โ€™I cannot go home to my father without my brothersโ€™; so he said, โ€™My dear friend, cannot you tell me where my two brothers are, who set out in search of the Water of Life before me, and never came back?โ€™ โ€™I have shut them up by a charm between two mountains,โ€™ said the dwarf, โ€™because they were proud and ill-behaved, and scorned to ask advice.โ€™ The prince begged so hard for his brothers, that the dwarf at last set them free, though unwillingly, saying, โ€™Beware of them, for they have bad hearts.โ€™ Their brother, however, was greatly rejoiced to see them, and told them all that had happened to him; how he had found the Water of Life, and had taken a cup full of it; and how he had set a beautiful princess free from a spell that bound her; and how she had engaged to wait a whole year, and then to marry him, and to give him the kingdom. + +Then they all three rode on together, and on their way home came to a country that was laid waste by war and a dreadful famine, so that it was feared all must die for want. But the prince gave the king of the land the bread, and all his kingdom ate of it. And he lent the king the wonderful sword, and he slew the enemyโ€™s army with it; and thus the kingdom was once more in peace and plenty. 
In the same manner he befriended two other countries through which they passed on their way. + +When they came to the sea, they got into a ship and during their voyage the two eldest said to themselves, โ€™Our brother has got the water which we could not find, therefore our father will forsake us and give him the kingdom, which is our rightโ€™; so they were full of envy and revenge, and agreed together how they could ruin him. Then they waited till he was fast asleep, and poured the Water of Life out of the cup, and took it for themselves, giving him bitter sea-water instead. + +When they came to their journeyโ€™s end, the youngest son brought his cup to the sick king, that he might drink and be healed. Scarcely, however, had he tasted the bitter sea-water when he became worse even than he was before; and then both the elder sons came in, and blamed the youngest for what they had done; and said that he wanted to poison their father, but that they had found the Water of Life, and had brought it with them. He no sooner began to drink of what they brought him, than he felt his sickness leave him, and was as strong and well as in his younger days. Then they went to their brother, and laughed at him, and said, โ€™Well, brother, you found the Water of Life, did you? You have had the trouble and we shall have the reward. Pray, with all your cleverness, why did not you manage to keep your eyes open? Next year one of us will take away your beautiful princess, if you do not take care. You had better say nothing about this to our father, for he does not believe a word you say; and if you tell tales, you shall lose your life into the bargain: but be quiet, and we will let you off.โ€™ + +The old king was still very angry with his youngest son, and thought that he really meant to have taken away his life; so he called his court together, and asked what should be done, and all agreed that he ought to be put to death. 
The prince knew nothing of what was going on, till one day, when the kingโ€™s chief huntsmen went a-hunting with him, and they were alone in the wood together, the huntsman looked so sorrowful that the prince said, โ€™My friend, what is the matter with you?โ€™ โ€™I cannot and dare not tell you,โ€™ said he. But the prince begged very hard, and said, โ€™Only tell me what it is, and do not think I shall be angry, for I will forgive you.โ€™ โ€™Alas!โ€™ said the huntsman; โ€™the king has ordered me to shoot you.โ€™ The prince started at this, and said, โ€™Let me live, and I will change dresses with you; you shall take my royal coat to show to my father, and do you give me your shabby one.โ€™ โ€™With all my heart,โ€™ said the huntsman; โ€™I am sure I shall be glad to save you, for I could not have shot you.โ€™ Then he took the princeโ€™s coat, and gave him the shabby one, and went away through the wood. + +Some time after, three grand embassies came to the old kingโ€™s court, with rich gifts of gold and precious stones for his youngest son; now all these were sent from the three kings to whom he had lent his sword and loaf of bread, in order to rid them of their enemy and feed their people. This touched the old kingโ€™s heart, and he thought his son might still be guiltless, and said to his court, โ€™O that my son were still alive! how it grieves me that I had him killed!โ€™ โ€™He is still alive,โ€™ said the huntsman; โ€™and I am glad that I had pity on him, but let him go in peace, and brought home his royal coat.โ€™ At this the king was overwhelmed with joy, and made it known throughout all his kingdom, that if his son would come back to his court he would forgive him. 
+ +Meanwhile the princess was eagerly waiting till her deliverer should come back; and had a road made leading up to her palace all of shining gold; and told her courtiers that whoever came on horseback, and rode straight up to the gate upon it, was her true lover; and that they must let him in: but whoever rode on one side of it, they must be sure was not the right one; and that they must send him away at once. + +The time soon came, when the eldest brother thought that he would make haste to go to the princess, and say that he was the one who had set her free, and that he should have her for his wife, and the kingdom with her. As he came before the palace and saw the golden road, he stopped to look at it, and he thought to himself, โ€™It is a pity to ride upon this beautiful roadโ€™; so he turned aside and rode on the right-hand side of it. But when he came to the gate, the guards, who had seen the road he took, said to him, he could not be what he said he was, and must go about his business. + +The second prince set out soon afterwards on the same errand; and when he came to the golden road, and his horse had set one foot upon it, he stopped to look at it, and thought it very beautiful, and said to himself, โ€™What a pity it is that anything should tread here!โ€™ Then he too turned aside and rode on the left side of it. But when he came to the gate the guards said he was not the true prince, and that he too must go away about his business; and away he went. + +Now when the full year was come round, the third brother left the forest in which he had lain hid for fear of his fatherโ€™s anger, and set out in search of his betrothed bride. So he journeyed on, thinking of her all the way, and rode so quickly that he did not even see what the road was made of, but went with his horse straight over it; and as he came to the gate it flew open, and the princess welcomed him with joy, and said he was her deliverer, and should now be her husband and lord of the kingdom. 
When the first joy at their meeting was over, the princess told him she had heard of his father having forgiven him, and of his wish to have him home again: so, before his wedding with the princess, he went to visit his father, taking her with him. Then he told him everything; how his brothers had cheated and robbed him, and yet that he had borne all those wrongs for the love of his father. And the old king was very angry, and wanted to punish his wicked sons; but they made their escape, and got into a ship and sailed away over the wide sea, and where they went to nobody knew and nobody cared. + +And now the old king gathered together his court, and asked all his kingdom to come and celebrate the wedding of his son and the princess. And young and old, noble and squire, gentle and simple, came at once on the summons; and among the rest came the friendly dwarf, with the sugarloaf hat, and a new scarlet cloak. + + And the wedding was held, and the merry bells rung. + And all the good people they danced and they sung, + And feasted and frolick’d I can’t tell how long. \ No newline at end of file diff --git a/python/samples/learn_resources/resources/Grimms-The-White-Snake.txt b/python/samples/learn_resources/resources/Grimms-The-White-Snake.txt new file mode 100644 index 000000000000..75ff737109f1 --- /dev/null +++ b/python/samples/learn_resources/resources/Grimms-The-White-Snake.txt @@ -0,0 +1,28 @@ +The White Snake +By the Grimm Brothers +
+A long time ago there lived a king who was famed for his wisdom through all the land. Nothing was hidden from him, and it seemed as if news of the most secret things was brought to him through the air. But he had a strange custom; every day after dinner, when the table was cleared, and no one else was present, a trusty servant had to bring him one more dish. 
It was covered, however, and even the servant did not know what was in it, neither did anyone know, for the king never took off the cover to eat of it until he was quite alone. + +This had gone on for a long time, when one day the servant, who took away the dish, was overcome with such curiosity that he could not help carrying the dish into his room. When he had carefully locked the door, he lifted up the cover, and saw a white snake lying on the dish. But when he saw it he could not deny himself the pleasure of tasting it, so he cut off a little bit and put it into his mouth. No sooner had it touched his tongue than he heard a strange whispering of little voices outside his window. He went and listened, and then noticed that it was the sparrows who were chattering together, and telling one another of all kinds of things which they had seen in the fields and woods. Eating the snake had given him power of understanding the language of animals. + +Now it so happened that on this very day the queen lost her most beautiful ring, and suspicion of having stolen it fell upon this trusty servant, who was allowed to go everywhere. The king ordered the man to be brought before him, and threatened with angry words that unless he could before the morrow point out the thief, he himself should be looked upon as guilty and executed. In vain he declared his innocence; he was dismissed with no better answer. + +In his trouble and fear he went down into the courtyard and took thought how to help himself out of his trouble. Now some ducks were sitting together quietly by a brook and taking their rest; and, whilst they were making their feathers smooth with their bills, they were having a confidential conversation together. The servant stood by and listened. 
They were telling one another of all the places where they had been waddling about all the morning, and what good food they had found; and one said in a pitiful tone: โ€™Something lies heavy on my stomach; as I was eating in haste I swallowed a ring which lay under the queenโ€™s window.โ€™ The servant at once seized her by the neck, carried her to the kitchen, and said to the cook: โ€™Here is a fine duck; pray, kill her.โ€™ โ€™Yes,โ€™ said the cook, and weighed her in his hand; โ€™she has spared no trouble to fatten herself, and has been waiting to be roasted long enough.โ€™ So he cut off her head, and as she was being dressed for the spit, the queenโ€™s ring was found inside her. + +The servant could now easily prove his innocence; and the king, to make amends for the wrong, allowed him to ask a favour, and promised him the best place in the court that he could wish for. The servant refused everything, and only asked for a horse and some money for travelling, as he had a mind to see the world and go about a little. When his request was granted he set out on his way, and one day came to a pond, where he saw three fishes caught in the reeds and gasping for water. Now, though it is said that fishes are dumb, he heard them lamenting that they must perish so miserably, and, as he had a kind heart, he got off his horse and put the three prisoners back into the water. They leapt with delight, put out their heads, and cried to him: โ€™We will remember you and repay you for saving us!โ€™ + +He rode on, and after a while it seemed to him that he heard a voice in the sand at his feet. He listened, and heard an ant-king complain: โ€™Why cannot folks, with their clumsy beasts, keep off our bodies? 
That stupid horse, with his heavy hoofs, has been treading down my people without mercy!โ€™ So he turned on to a side path and the ant-king cried out to him: โ€™We will remember youโ€“one good turn deserves another!โ€™ + +The path led him into a wood, and there he saw two old ravens standing by their nest, and throwing out their young ones. โ€™Out with you, you idle, good-for-nothing creatures!โ€™ cried they; โ€™we cannot find food for you any longer; you are big enough, and can provide for yourselves.โ€™ But the poor young ravens lay upon the ground, flapping their wings, and crying: โ€™Oh, what helpless chicks we are! We must shift for ourselves, and yet we cannot fly! What can we do, but lie here and starve?โ€™ So the good young fellow alighted and killed his horse with his sword, and gave it to them for food. Then they came hopping up to it, satisfied their hunger, and cried: โ€™We will remember youโ€“one good turn deserves another!โ€™ + +And now he had to use his own legs, and when he had walked a long way, he came to a large city. There was a great noise and crowd in the streets, and a man rode up on horseback, crying aloud: โ€™The kingโ€™s daughter wants a husband; but whoever seeks her hand must perform a hard task, and if he does not succeed he will forfeit his life.โ€™ Many had already made the attempt, but in vain; nevertheless when the youth saw the kingโ€™s daughter he was so overcome by her great beauty that he forgot all danger, went before the king, and declared himself a suitor. + +So he was led out to the sea, and a gold ring was thrown into it, before his eyes; then the king ordered him to fetch this ring up from the bottom of the sea, and added: โ€™If you come up again without it you will be thrown in again and again until you perish amid the waves.โ€™ All the people grieved for the handsome youth; then they went away, leaving him alone by the sea. 
+ +He stood on the shore and considered what he should do, when suddenly he saw three fishes come swimming towards him, and they were the very fishes whose lives he had saved. The one in the middle held a mussel in its mouth, which it laid on the shore at the youthโ€™s feet, and when he had taken it up and opened it, there lay the gold ring in the shell. Full of joy he took it to the king and expected that he would grant him the promised reward. + +But when the proud princess perceived that he was not her equal in birth, she scorned him, and required him first to perform another task. She went down into the garden and strewed with her own hands ten sacksful of millet-seed on the grass; then she said: โ€™Tomorrow morning before sunrise these must be picked up, and not a single grain be wanting.โ€™ + +The youth sat down in the garden and considered how it might be possible to perform this task, but he could think of nothing, and there he sat sorrowfully awaiting the break of day, when he should be led to death. But as soon as the first rays of the sun shone into the garden he saw all the ten sacks standing side by side, quite full, and not a single grain was missing. The ant-king had come in the night with thousands and thousands of ants, and the grateful creatures had by great industry picked up all the millet-seed and gathered them into the sacks. + +Presently the kingโ€™s daughter herself came down into the garden, and was amazed to see that the young man had done the task she had given him. But she could not yet conquer her proud heart, and said: โ€™Although he has performed both the tasks, he shall not be my husband until he had brought me an apple from the Tree of Life.โ€™ The youth did not know where the Tree of Life stood, but he set out, and would have gone on for ever, as long as his legs would carry him, though he had no hope of finding it. After he had wandered through three kingdoms, he came one evening to a wood, and lay down under a tree to sleep. 
But he heard a rustling in the branches, and a golden apple fell into his hand. At the same time three ravens flew down to him, perched themselves upon his knee, and said: โ€™We are the three young ravens whom you saved from starving; when we had grown big, and heard that you were seeking the Golden Apple, we flew over the sea to the end of the world, where the Tree of Life stands, and have brought you the apple.โ€™ The youth, full of joy, set out homewards, and took the Golden Apple to the kingโ€™s beautiful daughter, who had now no more excuses left to make. They cut the Apple of Life in two and ate it together; and then her heart became full of love for him, and they lived in undisturbed happiness to a great age. \ No newline at end of file diff --git a/python/samples/learn_resources/resources/PopulationByAdmin1.csv b/python/samples/learn_resources/resources/PopulationByAdmin1.csv new file mode 100644 index 000000000000..7fc6970985da --- /dev/null +++ b/python/samples/learn_resources/resources/PopulationByAdmin1.csv @@ -0,0 +1,744 @@ +UID,iso2,iso3,code3,Province_State,Country_Region,Lat,Long,Combined_Key,Population +5601,BE,BEL,56,Antwerp,Belgium,51.2195,4.4024,"Antwerp, Belgium",1869730 +5602,BE,BEL,56,Brussels,Belgium,50.8503,4.3517,"Brussels, Belgium",1218255 +5603,BE,BEL,56,East Flanders,Belgium,51.0362,3.7373,"East Flanders, Belgium",1525255 +5604,BE,BEL,56,Flemish Brabant,Belgium,50.9167,4.5833,"Flemish Brabant, Belgium",1155843 +5605,BE,BEL,56,Hainaut,Belgium,50.5257,4.0621,"Hainaut, Belgium",1346840 +5606,BE,BEL,56,Liege,Belgium,50.4496,5.8492,"Liege, Belgium",1109800 +5607,BE,BEL,56,Limburg,Belgium,50.9739,5.342,"Limburg, Belgium",877370 +5608,BE,BEL,56,Luxembourg,Belgium,50.0547,5.4677,"Luxembourg, Belgium",286752 +5609,BE,BEL,56,Namur,Belgium,50.331,4.8221,"Namur, Belgium",495832 +5611,BE,BEL,56,Walloon Brabant,Belgium,50.4,4.35,"Walloon Brabant, Belgium",406019 +5612,BE,BEL,56,West Flanders,Belgium,51.0536,3.1458,"West Flanders, Belgium",1200945 
+7601,BR,BRA,76,Acre,Brazil,-9.0238,-70.812,"Acre, Brazil",881935 +7602,BR,BRA,76,Alagoas,Brazil,-9.5713,-36.782,"Alagoas, Brazil",3337357 +7603,BR,BRA,76,Amapa,Brazil,0.902,-52.003,"Amapa, Brazil",845731 +7604,BR,BRA,76,Amazonas,Brazil,-3.4168,-65.8561,"Amazonas, Brazil",4144597 +7605,BR,BRA,76,Bahia,Brazil,-12.5797,-41.7007,"Bahia, Brazil",14873064 +7606,BR,BRA,76,Ceara,Brazil,-5.4984,-39.3206,"Ceara, Brazil",9132078 +7607,BR,BRA,76,Distrito Federal,Brazil,-15.7998,-47.8645,"Distrito Federal, Brazil",3015268 +7608,BR,BRA,76,Espirito Santo,Brazil,-19.1834,-40.3089,"Espirito Santo, Brazil",4018650 +7609,BR,BRA,76,Goias,Brazil,-15.827,-49.8362,"Goias, Brazil",7018354 +7610,BR,BRA,76,Maranhao,Brazil,-4.9609,-45.2744,"Maranhao, Brazil",7075181 +7611,BR,BRA,76,Mato Grosso,Brazil,-12.6819,-56.9211,"Mato Grosso, Brazil",3484466 +7612,BR,BRA,76,Mato Grosso do Sul,Brazil,-20.7722,-54.7852,"Mato Grosso do Sul, Brazil",2778986 +7613,BR,BRA,76,Minas Gerais,Brazil,-18.5122,-44.555,"Minas Gerais, Brazil",21168791 +7614,BR,BRA,76,Para,Brazil,-1.9981,-54.9306,"Para, Brazil",8602865 +7615,BR,BRA,76,Paraiba,Brazil,-7.24,-36.782,"Paraiba, Brazil",4018127 +7616,BR,BRA,76,Parana,Brazil,-25.2521,-52.0215,"Parana, Brazil",11433957 +7617,BR,BRA,76,Pernambuco,Brazil,-8.8137,-36.9541,"Pernambuco, Brazil",9557071 +7618,BR,BRA,76,Piaui,Brazil,-7.7183,-42.7289,"Piaui, Brazil",3273227 +7619,BR,BRA,76,Rio de Janeiro,Brazil,-22.9068,-43.1729,"Rio de Janeiro, Brazil",17264943 +7620,BR,BRA,76,Rio Grande do Norte,Brazil,-5.4026,-36.9541,"Rio Grande do Norte, Brazil",3506853 +7621,BR,BRA,76,Rio Grande do Sul,Brazil,-30.0346,-51.2177,"Rio Grande do Sul, Brazil",11377239 +7622,BR,BRA,76,Rondonia,Brazil,-11.5057,-63.5806,"Rondonia, Brazil",1777225 +7623,BR,BRA,76,Roraima,Brazil,-2.7376,-62.0751,"Roraima, Brazil",605761 +7624,BR,BRA,76,Santa Catarina,Brazil,-27.2423,-50.2189,"Santa Catarina, Brazil",7164788 +7625,BR,BRA,76,Sao Paulo,Brazil,-23.5505,-46.6333,"Sao Paulo, Brazil",45919049 
+7626,BR,BRA,76,Sergipe,Brazil,-10.5741,-37.3857,"Sergipe, Brazil",2298696 +7627,BR,BRA,76,Tocantins,Brazil,-10.1753,-48.2982,"Tocantins, Brazil",1572866 +15201,CL,CHL,152,Antofagasta,Chile,-23.6509,-70.3975,"Antofagasta, Chile",607534 +15202,CL,CHL,152,Araucania,Chile,-38.9489,-72.3311,"Araucania, Chile",957224 +15203,CL,CHL,152,Arica y Parinacota,Chile,-18.594,-69.4785,"Arica y Parinacota, Chile",226068 +15204,CL,CHL,152,Atacama,Chile,-27.5661,-70.0503,"Atacama, Chile",288944 +15205,CL,CHL,152,Aysen,Chile,-45.9864,-73.7669,"Aysen, Chile",103158 +15206,CL,CHL,152,Biobio,Chile,-37.4464,-72.1416,"Biobio, Chile",1556805 +15207,CL,CHL,152,Coquimbo,Chile,-29.959,-71.3389,"Coquimbo, Chile",757586 +15208,CL,CHL,152,Los Lagos,Chile,-41.9198,-72.1416,"Los Lagos, Chile",828708 +15209,CL,CHL,152,Los Rios,Chile,-40.231,-72.3311,"Los Rios, Chile",384837 +15210,CL,CHL,152,Magallanes,Chile,-52.368,-70.9863,"Magallanes, Chile",166533 +15211,CL,CHL,152,Maule,Chile,-35.5183,-71.6885,"Maule, Chile",1044950 +15212,CL,CHL,152,Metropolitana,Chile,-33.4376,-70.6505,"Metropolitana, Chile",7112808 +15213,CL,CHL,152,Nuble,Chile,-36.7226,-71.7622,"Nuble, Chile",480609 +15214,CL,CHL,152,OHiggins,Chile,-34.5755,-71.0022,"OHiggins, Chile",914555 +15215,CL,CHL,152,Tarapaca,Chile,-19.9232,-69.5132,"Tarapaca, Chile",330558 +15216,CL,CHL,152,Valparaiso,Chile,-33.0472,-71.6127,"Valparaiso, Chile",1815902 +17001,CO,COL,170,Amazonas,Colombia,-1.4429,-71.5724,"Amazonas, Colombia",76589 +17002,CO,COL,170,Antioquia,Colombia,7.1986,-75.3412,"Antioquia, Colombia",6407102 +17003,CO,COL,170,Arauca,Colombia,7.0762,-70.7105,"Arauca, Colombia",262174 +17004,CO,COL,170,Atlantico,Colombia,10.6966,-74.8741,"Atlantico, Colombia",2535517 +17005,CO,COL,170,Bolivar,Colombia,8.6704,-74.03,"Bolivar, Colombia",2070110 +17006,CO,COL,170,Boyaca,Colombia,5.4545,-73.362,"Boyaca, Colombia",1217376 +17007,CO,COL,170,Caldas,Colombia,5.2983,-75.2479,"Caldas, Colombia",998255 +17008,CO,COL,170,Capital 
District,Colombia,4.711,-74.0721,"Capital District, Colombia",7412566 +17009,CO,COL,170,Caqueta,Colombia,0.8699,-73.8419,"Caqueta, Colombia",401489 +17010,CO,COL,170,Casanare,Colombia,5.7589,-71.5724,"Casanare, Colombia",420504 +17011,CO,COL,170,Cauca,Colombia,2.705,-76.826,"Cauca, Colombia",1464488 +17012,CO,COL,170,Cesar,Colombia,9.3373,-73.6536,"Cesar, Colombia",1200574 +17013,CO,COL,170,Choco,Colombia,5.2528,-76.826,"Choco, Colombia",534826 +17014,CO,COL,170,Cordoba,Colombia,8.0493,-75.574,"Cordoba, Colombia",1784783 +17015,CO,COL,170,Cundinamarca,Colombia,5.026,-74.03,"Cundinamarca, Colombia",2919060 +17016,CO,COL,170,Guainia,Colombia,2.5854,-68.5247,"Guainia, Colombia",48114 +17017,CO,COL,170,Guaviare,Colombia,1.0654,-73.2603,"Guaviare, Colombia",82767 +17018,CO,COL,170,Huila,Colombia,2.5359,-75.5277,"Huila, Colombia",1100386 +17019,CO,COL,170,La Guajira,Colombia,11.3548,-72.5205,"La Guajira, Colombia",880560 +17020,CO,COL,170,Magdalena,Colombia,10.4113,-74.4057,"Magdalena, Colombia",1341746 +17021,CO,COL,170,Meta,Colombia,3.272,-73.0877,"Meta, Colombia",1039722 +17022,CO,COL,170,Narino,Colombia,1.2892,-77.3579,"Narino, Colombia",1630592 +17023,CO,COL,170,Norte de Santander,Colombia,7.9463,-72.8988,"Norte de Santander, Colombia",1491689 +17024,CO,COL,170,Putumayo,Colombia,0.436,-75.5277,"Putumayo, Colombia",348182 +17025,CO,COL,170,Quindio,Colombia,4.461,-75.6674,"Quindio, Colombia",539904 +17026,CO,COL,170,Risaralda,Colombia,5.3158,-75.9928,"Risaralda, Colombia",943401 +17027,CO,COL,170,San Andres y Providencia,Colombia,12.5567,-81.7185,"San Andres y Providencia, Colombia",61280 +17028,CO,COL,170,Santander,Colombia,6.6437,-73.6536,"Santander, Colombia",2184837 +17029,CO,COL,170,Sucre,Colombia,8.814,-74.7233,"Sucre, Colombia",904863 +17030,CO,COL,170,Tolima,Colombia,4.0925,-75.1545,"Tolima, Colombia",1330187 +17031,CO,COL,170,Valle del Cauca,Colombia,3.8009,-76.6413,"Valle del Cauca, Colombia",4475886 +17032,CO,COL,170,Vaupes,Colombia,0.8554,-70.812,"Vaupes, 
Colombia",40797 +17033,CO,COL,170,Vichada,Colombia,4.4234,-69.2878,"Vichada, Colombia",107808 +234,FO,FRO,234,Faroe Islands,Denmark,61.8926,-6.9118,"Faroe Islands, Denmark",48865 +304,GL,GRL,304,Greenland,Denmark,71.7069,-42.6043,"Greenland, Denmark",56772 +254,GF,GUF,254,French Guiana,France,4,-53,"French Guiana, France",298682 +258,PF,PYF,258,French Polynesia,France,-17.6797,-149.4068,"French Polynesia, France",280904 +312,GP,GLP,312,Guadeloupe,France,16.265,-61.551,"Guadeloupe, France",400127 +474,MQ,MTQ,474,Martinique,France,14.6415,-61.0242,"Martinique, France",375265 +175,YT,MYT,175,Mayotte,France,-12.8275,45.166244,"Mayotte, France",272813 +540,NC,NCL,540,New Caledonia,France,-20.904305,165.618042,"New Caledonia, France",285491 +638,RE,REU,638,Reunion,France,-21.1151,55.5364,"Reunion, France",895308 +652,BL,BLM,652,Saint Barthelemy,France,17.9,-62.8333,"Saint Barthelemy, France",9885 +666,PM,SPM,666,Saint Pierre and Miquelon,France,46.8852,-56.3159,"Saint Pierre and Miquelon, France",5795 +663,MF,MAF,663,St Martin,France,18.0708,-63.0501,"St Martin, France",38659 +876,WF,WLF,876,Wallis and Futuna,France,-14.2938,-178.1165,"Wallis and Futuna, France",15289 +27601,DE,DEU,276,Baden-Wurttemberg,Germany,48.6616,9.3501,"Baden-Wurttemberg, Germany",11103043 +27602,DE,DEU,276,Bayern,Germany,48.7904,11.4979,"Bayern, Germany",13140183 +27603,DE,DEU,276,Berlin,Germany,52.52,13.405,"Berlin, Germany",3664088 +27604,DE,DEU,276,Brandenburg,Germany,52.4125,12.5316,"Brandenburg, Germany",2531071 +27605,DE,DEU,276,Bremen,Germany,53.0793,8.8017,"Bremen, Germany",680130 +27606,DE,DEU,276,Hamburg,Germany,53.5511,9.9937,"Hamburg, Germany",1852478 +27607,DE,DEU,276,Hessen,Germany,50.6521,9.1624,"Hessen, Germany",6293154 +27608,DE,DEU,276,Mecklenburg-Vorpommern,Germany,53.6127,12.4296,"Mecklenburg-Vorpommern, Germany",1610774 +27609,DE,DEU,276,Niedersachsen,Germany,52.6367,9.8451,"Niedersachsen, Germany",8003421 
+27610,DE,DEU,276,Nordrhein-Westfalen,Germany,51.4332,7.6616,"Nordrhein-Westfalen, Germany",17925570 +27611,DE,DEU,276,Rheinland-Pfalz,Germany,50.1183,7.309,"Rheinland-Pfalz, Germany",4098391 +27612,DE,DEU,276,Saarland,Germany,49.3964,7.023,"Saarland, Germany",983991 +27613,DE,DEU,276,Sachsen,Germany,51.1045,13.2017,"Sachsen, Germany",4056941 +27614,DE,DEU,276,Sachsen-Anhalt,Germany,51.9503,11.6923,"Sachsen-Anhalt, Germany",2180684 +27615,DE,DEU,276,Schleswig-Holstein,Germany,54.2194,9.6961,"Schleswig-Holstein, Germany",2910875 +27616,DE,DEU,276,Thuringen,Germany,51.011,10.8453,"Thuringen, Germany",2120237 +35601,IN,IND,356,Andaman and Nicobar Islands,India,11.225999,92.968178,"Andaman and Nicobar Islands, India",417036 +35602,IN,IND,356,Andhra Pradesh,India,15.9129,79.74,"Andhra Pradesh, India",53903393 +35603,IN,IND,356,Arunachal Pradesh,India,27.768456,96.384277,"Arunachal Pradesh, India",1570458 +35604,IN,IND,356,Assam,India,26.357149,92.830441,"Assam, India",35607039 +35605,IN,IND,356,Bihar,India,25.679658,85.60484,"Bihar, India",124799926 +35606,IN,IND,356,Chandigarh,India,30.733839,76.768278,"Chandigarh, India",1158473 +35607,IN,IND,356,Chhattisgarh,India,21.264705,82.035366,"Chhattisgarh, India",29436231 +35608,IN,IND,356,Dadra and Nagar Haveli and Daman and Diu,India,20.194742,73.080901,"Dadra and Nagar Haveli and Daman and Diu, India",615724 +35609,IN,IND,356,Delhi,India,28.646519,77.10898,"Delhi, India",18710922 +35610,IN,IND,356,Goa,India,15.359682,74.057396,"Goa, India",1586250 +35611,IN,IND,356,Gujarat,India,22.694884,71.590923,"Gujarat, India",63872399 +35612,IN,IND,356,Haryana,India,29.20004,76.332824,"Haryana, India",28204692 +35613,IN,IND,356,Himachal Pradesh,India,31.927213,77.233081,"Himachal Pradesh, India",7451955 +35614,IN,IND,356,Jammu and Kashmir,India,33.75943,76.612638,"Jammu and Kashmir, India",13606320 +35615,IN,IND,356,Jharkhand,India,23.654536,85.557631,"Jharkhand, India",38593948 
+35616,IN,IND,356,Karnataka,India,14.70518,76.166436,"Karnataka, India",67562686 +35617,IN,IND,356,Kerala,India,10.450898,76.405749,"Kerala, India",35699443 +35618,IN,IND,356,Ladakh,India,34.1526,77.5771,"Ladakh, India",274289 +35619,IN,IND,356,Madhya Pradesh,India,23.541513,78.289633,"Madhya Pradesh, India",85358965 +35620,IN,IND,356,Maharashtra,India,19.449759,76.108221,"Maharashtra, India",123144223 +35621,IN,IND,356,Manipur,India,24.738975,93.882541,"Manipur, India",3091545 +35622,IN,IND,356,Meghalaya,India,25.536934,91.278882,"Meghalaya, India",3366710 +35623,IN,IND,356,Mizoram,India,23.309381,92.83822,"Mizoram, India",1239244 +35624,IN,IND,356,Nagaland,India,26.06702,94.470302,"Nagaland, India",2249695 +35625,IN,IND,356,Odisha,India,20.505428,84.418059,"Odisha, India",46356334 +35626,IN,IND,356,Puducherry,India,11.882658,78.86498,"Puducherry, India",1413542 +35627,IN,IND,356,Punjab,India,30.841465,75.40879,"Punjab, India",30141373 +35628,IN,IND,356,Rajasthan,India,26.583423,73.847973,"Rajasthan, India",81032689 +35629,IN,IND,356,Sikkim,India,27.571671,88.472712,"Sikkim, India",690251 +35630,IN,IND,356,Tamil Nadu,India,11.006091,78.400624,"Tamil Nadu, India",77841267 +35631,IN,IND,356,Telangana,India,18.1124,79.0193,"Telangana, India",39362732 +35632,IN,IND,356,Tripura,India,23.746783,91.743565,"Tripura, India",4169794 +35633,IN,IND,356,Uttar Pradesh,India,26.925425,80.560982,"Uttar Pradesh, India",237882725 +35634,IN,IND,356,Uttarakhand,India,30.156447,79.197608,"Uttarakhand, India",11250858 +35635,IN,IND,356,West Bengal,India,23.814082,87.979803,"West Bengal, India",99609303 +35637,IN,IND,356,Lakshadweep,India,13.6999972,72.1833326,"Lakshadweep, India",64429 +38013,IT,ITA,380,Abruzzo,Italy,42.35122196,13.39843823,"Abruzzo, Italy",1311580 +38017,IT,ITA,380,Basilicata,Italy,40.63947052,15.80514834,"Basilicata, Italy",562869 +38018,IT,ITA,380,Calabria,Italy,38.90597598,16.59440194,"Calabria, Italy",1947131 
+38015,IT,ITA,380,Campania,Italy,40.83956555,14.25084984,"Campania, Italy",5801692 +38008,IT,ITA,380,Emilia-Romagna,Italy,44.49436681,11.3417208,"Emilia-Romagna, Italy",4459477 +38006,IT,ITA,380,Friuli Venezia Giulia,Italy,45.6494354,13.76813649,"Friuli Venezia Giulia, Italy",1215220 +38012,IT,ITA,380,Lazio,Italy,41.89277044,12.48366722,"Lazio, Italy",5879082 +38007,IT,ITA,380,Liguria,Italy,44.41149315,8.9326992,"Liguria, Italy",1550640 +38003,IT,ITA,380,Lombardia,Italy,45.46679409,9.190347404,"Lombardia, Italy",10060574 +38011,IT,ITA,380,Marche,Italy,43.61675973,13.5188753,"Marche, Italy",1525271 +38014,IT,ITA,380,Molise,Italy,41.55774754,14.65916051,"Molise, Italy",305617 +38041,IT,ITA,380,P.A. Bolzano,Italy,46.49933453,11.35662422,"P.A. Bolzano, Italy",532318 +38042,IT,ITA,380,P.A. Trento,Italy,46.06893511,11.12123097,"P.A. Trento, Italy",541418 +38001,IT,ITA,380,Piemonte,Italy,45.0732745,7.680687483,"Piemonte, Italy",4356406 +38016,IT,ITA,380,Puglia,Italy,41.12559576,16.86736689,"Puglia, Italy",4029053 +38020,IT,ITA,380,Sardegna,Italy,39.21531192,9.110616306,"Sardegna, Italy",1639591 +38019,IT,ITA,380,Sicilia,Italy,38.11569725,13.3623567,"Sicilia, Italy",4999891 +38009,IT,ITA,380,Toscana,Italy,43.76923077,11.25588885,"Toscana, Italy",3729641 +38010,IT,ITA,380,Umbria,Italy,43.10675841,12.38824698,"Umbria, Italy",882015 +38002,IT,ITA,380,Valle d'Aosta,Italy,45.73750286,7.320149366,"Valle d'Aosta, Italy",125666 +38005,IT,ITA,380,Veneto,Italy,45.43490485,12.33845213,"Veneto, Italy",4905854 +39201,JP,JPN,392,Aichi,Japan,35.035551,137.211621,"Aichi, Japan",7552239 +39202,JP,JPN,392,Akita,Japan,39.748679,140.408228,"Akita, Japan",966490 +39203,JP,JPN,392,Aomori,Japan,40.781541,140.828896,"Aomori, Japan",1246371 +39204,JP,JPN,392,Chiba,Japan,35.510141,140.198917,"Chiba, Japan",6259382 +39205,JP,JPN,392,Ehime,Japan,33.624835,132.856842,"Ehime, Japan",1339215 +39206,JP,JPN,392,Fukui,Japan,35.846614,136.224654,"Fukui, Japan",767937 
+39207,JP,JPN,392,Fukuoka,Japan,33.526032,130.666949,"Fukuoka, Japan",5103679 +39208,JP,JPN,392,Fukushima,Japan,37.378867,140.223295,"Fukushima, Japan",1845519 +39209,JP,JPN,392,Gifu,Japan,35.778671,137.055925,"Gifu, Japan",1986587 +39210,JP,JPN,392,Gunma,Japan,36.504479,138.985605,"Gunma, Japan",1942456 +39211,JP,JPN,392,Hiroshima,Japan,34.605309,132.788719,"Hiroshima, Japan",2804177 +39212,JP,JPN,392,Hokkaido,Japan,43.385711,142.552318,"Hokkaido, Japan",5250049 +39213,JP,JPN,392,Hyogo,Japan,35.039913,134.828057,"Hyogo, Japan",5466190 +39214,JP,JPN,392,Ibaraki,Japan,36.303588,140.319591,"Ibaraki, Japan",2860307 +39215,JP,JPN,392,Ishikawa,Japan,36.769464,136.771027,"Ishikawa, Japan",1137649 +39216,JP,JPN,392,Iwate,Japan,39.593287,141.361777,"Iwate, Japan",1226816 +39217,JP,JPN,392,Kagawa,Japan,34.217292,133.969047,"Kagawa, Japan",956347 +39218,JP,JPN,392,Kagoshima,Japan,31.009484,130.430665,"Kagoshima, Japan",1602273 +39219,JP,JPN,392,Kanagawa,Japan,35.415312,139.338983,"Kanagawa, Japan",9198268 +39220,JP,JPN,392,Kochi,Japan,33.422519,133.367307,"Kochi, Japan",698029 +39221,JP,JPN,392,Kumamoto,Japan,32.608154,130.745231,"Kumamoto, Japan",1747567 +39222,JP,JPN,392,Kyoto,Japan,35.253815,135.443341,"Kyoto, Japan",2582957 +39223,JP,JPN,392,Mie,Japan,34.508018,136.376013,"Mie, Japan",1780882 +39224,JP,JPN,392,Miyagi,Japan,38.446859,140.927086,"Miyagi, Japan",2306365 +39225,JP,JPN,392,Miyazaki,Japan,32.193204,131.299374,"Miyazaki, Japan",1073301 +39226,JP,JPN,392,Nagano,Japan,36.132134,138.045528,"Nagano, Japan",2048790 +39227,JP,JPN,392,Nagasaki,Japan,33.235712,129.608033,"Nagasaki, Japan",1326524 +39228,JP,JPN,392,Nara,Japan,34.317451,135.871644,"Nara, Japan",1330123 +39229,JP,JPN,392,Niigata,Japan,37.521819,138.918647,"Niigata, Japan",2223106 +39230,JP,JPN,392,Oita,Japan,33.200697,131.43324,"Oita, Japan",1135434 +39231,JP,JPN,392,Okayama,Japan,34.89246,133.826252,"Okayama, Japan",1889586 +39232,JP,JPN,392,Okinawa,Japan,25.768923,126.668016,"Okinawa, Japan",1453168 
+39233,JP,JPN,392,Osaka,Japan,34.620965,135.507481,"Osaka, Japan",8809363 +39234,JP,JPN,392,Saga,Japan,33.286977,130.115738,"Saga, Japan",814711 +39235,JP,JPN,392,Saitama,Japan,35.997101,139.347635,"Saitama, Japan",7349693 +39236,JP,JPN,392,Shiga,Japan,35.215827,136.138064,"Shiga, Japan",1413943 +39237,JP,JPN,392,Shimane,Japan,35.07076,132.554064,"Shimane, Japan",674346 +39238,JP,JPN,392,Shizuoka,Japan,34.916975,138.407784,"Shizuoka, Japan",3643528 +39239,JP,JPN,392,Tochigi,Japan,36.689912,139.819213,"Tochigi, Japan",1933990 +39240,JP,JPN,392,Tokushima,Japan,33.919178,134.242091,"Tokushima, Japan",727977 +39241,JP,JPN,392,Tokyo,Japan,35.711343,139.446921,"Tokyo, Japan",13920663 +39242,JP,JPN,392,Tottori,Japan,35.359069,133.863619,"Tottori, Japan",555558 +39243,JP,JPN,392,Toyama,Japan,36.637464,137.269346,"Toyama, Japan",1043502 +39244,JP,JPN,392,Wakayama,Japan,33.911879,135.505446,"Wakayama, Japan",924933 +39245,JP,JPN,392,Yamagata,Japan,38.448396,140.102154,"Yamagata, Japan",1077666 +39246,JP,JPN,392,Yamaguchi,Japan,34.20119,131.573293,"Yamaguchi, Japan",1358336 +39247,JP,JPN,392,Yamanashi,Japan,35.612364,138.611489,"Yamanashi, Japan",810956 +45801,MY,MYS,458,Johor,Malaysia,1.4854,103.7618,"Johor, Malaysia",3768200 +45802,MY,MYS,458,Kedah,Malaysia,6.1184,100.3685,"Kedah, Malaysia",2185900 +45803,MY,MYS,458,Kelantan,Malaysia,6.1254,102.2381,"Kelantan, Malaysia",1892200 +45804,MY,MYS,458,Melaka,Malaysia,2.1896,102.2501,"Melaka, Malaysia",932700 +45805,MY,MYS,458,Negeri Sembilan,Malaysia,2.7258,101.9424,"Negeri Sembilan, Malaysia",1132100 +45806,MY,MYS,458,Pahang,Malaysia,3.8126,103.3256,"Pahang, Malaysia",1677100 +45807,MY,MYS,458,Perak,Malaysia,4.5921,101.0901,"Perak, Malaysia",2514300 +45808,MY,MYS,458,Perlis,Malaysia,6.4449,100.2048,"Perlis, Malaysia",254600 +45809,MY,MYS,458,Pulau Pinang,Malaysia,5.4141,100.3288,"Pulau Pinang, Malaysia",1777600 +45810,MY,MYS,458,Sabah,Malaysia,5.9788,116.0753,"Sabah, Malaysia",3904700 
+45811,MY,MYS,458,Sarawak,Malaysia,1.5533,110.3592,"Sarawak, Malaysia",2818100 +45812,MY,MYS,458,Selangor,Malaysia,3.0738,101.5183,"Selangor, Malaysia",6541900 +45813,MY,MYS,458,Terengganu,Malaysia,5.3117,103.1324,"Terengganu, Malaysia",1250100 +45814,MY,MYS,458,W.P. Kuala Lumpur,Malaysia,3.139,101.6869,"W.P. Kuala Lumpur, Malaysia",1778400 +45815,MY,MYS,458,W.P. Labuan,Malaysia,5.2831,115.2308,"W.P. Labuan, Malaysia",99400 +45816,MY,MYS,458,W.P. Putrajaya,Malaysia,2.9264,101.6964,"W.P. Putrajaya, Malaysia",105400 +48401,MX,MEX,484,Aguascalientes,Mexico,21.8853,-102.2916,"Aguascalientes, Mexico",1434635 +48402,MX,MEX,484,Baja California,Mexico,30.8406,-115.2838,"Baja California, Mexico",3634868 +48403,MX,MEX,484,Baja California Sur,Mexico,26.0444,-111.6661,"Baja California Sur, Mexico",804708 +48404,MX,MEX,484,Campeche,Mexico,19.8301,-90.5349,"Campeche, Mexico",1000617 +48405,MX,MEX,484,Chiapas,Mexico,16.7569,-93.1292,"Chiapas, Mexico",5730367 +48406,MX,MEX,484,Chihuahua,Mexico,28.633,-106.0691,"Chihuahua, Mexico",3801487 +48407,MX,MEX,484,Ciudad de Mexico,Mexico,19.4326,-99.1332,"Ciudad de Mexico, Mexico",9018645 +48408,MX,MEX,484,Coahuila,Mexico,27.0587,-101.7068,"Coahuila, Mexico",3218720 +48409,MX,MEX,484,Colima,Mexico,19.1223,-104.0072,"Colima, Mexico",785153 +48410,MX,MEX,484,Durango,Mexico,24.5593,-104.6588,"Durango, Mexico",1868996 +48411,MX,MEX,484,Guanajuato,Mexico,21.019,-101.2574,"Guanajuato, Mexico",6228175 +48412,MX,MEX,484,Guerrero,Mexico,17.4392,-99.5451,"Guerrero, Mexico",3657048 +48413,MX,MEX,484,Hidalgo,Mexico,20.0911,-98.7624,"Hidalgo, Mexico",3086414 +48414,MX,MEX,484,Jalisco,Mexico,20.6595,-103.3494,"Jalisco, Mexico",8409693 +48415,MX,MEX,484,Mexico,Mexico,19.4969,-99.7233,"Mexico, Mexico",17427790 +48416,MX,MEX,484,Michoacan,Mexico,19.5665,-101.7068,"Michoacan, Mexico",4825401 +48417,MX,MEX,484,Morelos,Mexico,18.6813,-99.1013,"Morelos, Mexico",2044058 +48418,MX,MEX,484,Nayarit,Mexico,21.7514,-104.8455,"Nayarit, Mexico",1288571 
+48419,MX,MEX,484,Nuevo Leon,Mexico,25.5922,-99.9962,"Nuevo Leon, Mexico",5610153 +48420,MX,MEX,484,Oaxaca,Mexico,17.0732,-96.7266,"Oaxaca, Mexico",4143593 +48421,MX,MEX,484,Puebla,Mexico,19.0414,-98.2063,"Puebla, Mexico",6604451 +48422,MX,MEX,484,Queretaro,Mexico,20.5888,-100.3899,"Queretaro, Mexico",2279637 +48423,MX,MEX,484,Quintana Roo,Mexico,19.1817,-88.4791,"Quintana Roo, Mexico",1723259 +48424,MX,MEX,484,San Luis Potosi,Mexico,22.1565,-100.9855,"San Luis Potosi, Mexico",2866142 +48425,MX,MEX,484,Sinaloa,Mexico,25.1721,-107.4795,"Sinaloa, Mexico",3156674 +48426,MX,MEX,484,Sonora,Mexico,29.2972,-110.3309,"Sonora, Mexico",3074745 +48427,MX,MEX,484,Tabasco,Mexico,17.8409,-92.6189,"Tabasco, Mexico",2572287 +48428,MX,MEX,484,Tamaulipas,Mexico,24.2669,-98.8363,"Tamaulipas, Mexico",3650602 +48429,MX,MEX,484,Tlaxcala,Mexico,19.3139,-98.2404,"Tlaxcala, Mexico",1380011 +48430,MX,MEX,484,Veracruz,Mexico,19.1738,-96.1342,"Veracruz, Mexico",8539862 +48431,MX,MEX,484,Yucatan,Mexico,20.7099,-89.0943,"Yucatan, Mexico",2259098 +48432,MX,MEX,484,Zacatecas,Mexico,22.7709,-102.5832,"Zacatecas, Mexico",1666426 +49801,MD,MDA,498,Anenii Noi,Moldova,46.8833,29.2167,"Anenii Noi, Moldova",81710 +49802,MD,MDA,498,Balti,Moldova,47.754,27.9184,"Balti, Moldova",127561 +49803,MD,MDA,498,Basarabeasca,Moldova,46.3333,28.9667,"Basarabeasca, Moldova",28978 +49804,MD,MDA,498,Bender,Moldova,46.8228,29.462,"Bender, Moldova",91197 +49805,MD,MDA,498,Briceni,Moldova,48.36,27.0858,"Briceni, Moldova",78027 +49806,MD,MDA,498,Cahul,Moldova,45.9167,28.1833,"Cahul, Moldova",119231 +49807,MD,MDA,498,Calarasi,Moldova,47.25,28.3,"Calarasi, Moldova",75075 +49808,MD,MDA,498,Camenca,Moldova,48.0319,28.6978,"Camenca, Moldova",8871 +49809,MD,MDA,498,Cantemir,Moldova,46.2854,28.1979,"Cantemir, Moldova",60001 +49810,MD,MDA,498,Causeni,Moldova,46.6333,29.4,"Causeni, Moldova",90612 +49811,MD,MDA,498,Ceadir-Lunga,Moldova,46.057,28.826,"Ceadir-Lunga, Moldova",16605 
+49812,MD,MDA,498,Chisinau,Moldova,47.0105,28.8638,"Chisinau, Moldova",712218 +49813,MD,MDA,498,Cimislia,Moldova,46.5289,28.7838,"Cimislia, Moldova",60925 +49814,MD,MDA,498,Comrat,Moldova,46.2956,28.6549,"Comrat, Moldova",72254 +49815,MD,MDA,498,Criuleni,Moldova,47.212,29.1617,"Criuleni, Moldova",46442 +49816,MD,MDA,498,Donduseni,Moldova,48.2372,27.6104,"Donduseni, Moldova",87092 +49817,MD,MDA,498,Drochia,Moldova,48.0333,27.75,"Drochia, Moldova",43015 +49818,MD,MDA,498,Dubasari,Moldova,47.267,29.167,"Dubasari, Moldova",28500 +49819,MD,MDA,498,Edinet,Moldova,48.1667,27.3167,"Edinet, Moldova",90320 +49820,MD,MDA,498,Falesti,Moldova,47.5,27.72,"Falesti, Moldova",89389 +49821,MD,MDA,498,Floresti,Moldova,47.8933,28.3014,"Floresti, Moldova",155646 +49822,MD,MDA,498,Glodeni,Moldova,47.7667,27.5167,"Glodeni, Moldova",119762 +49823,MD,MDA,498,Grigoriopol,Moldova,47.1536,29.2964,"Grigoriopol, Moldova",9381 +49824,MD,MDA,498,Hincesti,Moldova,46.8167,28.5833,"Hincesti, Moldova",97704 +49825,MD,MDA,498,Ialoveni,Moldova,46.9439,28.7772,"Ialoveni, Moldova",51056 +49826,MD,MDA,498,Leova,Moldova,46.4806,28.2644,"Leova, Moldova",64924 +49827,MD,MDA,498,Nisporeni,Moldova,47.0833,28.1833,"Nisporeni, Moldova",56510 +49828,MD,MDA,498,Ocnita,Moldova,48.4061,27.4859,"Ocnita, Moldova",116271 +49829,MD,MDA,498,Orhei,Moldova,47.3735,28.822,"Orhei, Moldova",48105 +49830,MD,MDA,498,Rezina,Moldova,47.7333,28.95,"Rezina, Moldova",69454 +49831,MD,MDA,498,Ribnita,Moldova,47.7667,29,"Ribnita, Moldova",47949 +49832,MD,MDA,498,Riscani,Moldova,47.9679,27.5565,"Riscani, Moldova",87153 +49833,MD,MDA,498,Singerei,Moldova,47.6333,28.15,"Singerei, Moldova",42227 +49834,MD,MDA,498,Slobozia,Moldova,46.7333,29.7,"Slobozia, Moldova",14618 +49835,MD,MDA,498,Soldanesti,Moldova,47.8167,28.8,"Soldanesti, Moldova",94986 +49836,MD,MDA,498,Soroca,Moldova,48.1618,28.3011,"Soroca, Moldova",70594 +49837,MD,MDA,498,Stefan Voda,Moldova,46.5153,29.5297,"Stefan Voda, Moldova",88900 
+49838,MD,MDA,498,Straseni,Moldova,47.1333,28.6167,"Straseni, Moldova",43154 +49839,MD,MDA,498,Taraclia,Moldova,45.9,28.6667,"Taraclia, Moldova",70126 +49840,MD,MDA,498,Telenesti,Moldova,47.5032,28.3535,"Telenesti, Moldova",383806 +49841,MD,MDA,498,Tiraspol,Moldova,46.85,29.6333,"Tiraspol, Moldova",133807 +49842,MD,MDA,498,Transnistria,Moldova,47.2153,29.463,"Transnistria, Moldova",110545 +49843,MD,MDA,498,Ungheni,Moldova,47.2077,27.8073,"Ungheni, Moldova",30804 +49844,MD,MDA,498,Vulcanesti,Moldova,45.6833,28.4042,"Vulcanesti, Moldova",12185 +52801,NL,NLD,528,Drenthe,Netherlands,52.862485,6.618435,"Drenthe, Netherlands",493682 +52802,NL,NLD,528,Flevoland,Netherlands,52.550383,5.515162,"Flevoland, Netherlands",423021 +52803,NL,NLD,528,Friesland,Netherlands,53.087337,5.7925,"Friesland, Netherlands",649957 +52804,NL,NLD,528,Gelderland,Netherlands,52.061738,5.939114,"Gelderland, Netherlands",2085952 +52805,NL,NLD,528,Groningen,Netherlands,53.217922,6.741514,"Groningen, Netherlands",585866 +52806,NL,NLD,528,Limburg,Netherlands,51.209227,5.93387,"Limburg, Netherlands",1117201 +52807,NL,NLD,528,Noord-Brabant,Netherlands,51.561174,5.184942,"Noord-Brabant, Netherlands",2562955 +52808,NL,NLD,528,Noord-Holland,Netherlands,52.600906,4.918688,"Noord-Holland, Netherlands",2879527 +52809,NL,NLD,528,Overijssel,Netherlands,52.444558,6.441722,"Overijssel, Netherlands",1162406 +52810,NL,NLD,528,Utrecht,Netherlands,52.084251,5.163824,"Utrecht, Netherlands",1354834 +52811,NL,NLD,528,Zeeland,Netherlands,51.47936,3.861559,"Zeeland, Netherlands",383488 +52812,NL,NLD,528,Zuid-Holland,Netherlands,51.937835,4.462114,"Zuid-Holland, Netherlands",3708696 +533,AW,ABW,533,Aruba,Netherlands,12.5211,-69.9683,"Aruba, Netherlands",106766 +531,CW,CUW,531,Curacao,Netherlands,12.1696,-68.99,"Curacao, Netherlands",164100 +534,SX,SXM,534,Sint Maarten,Netherlands,18.0425,-63.0548,"Sint Maarten, Netherlands",42882 +535,BQ,BES,535,"Bonaire, Sint Eustatius and Saba",Netherlands,12.1784,-68.2385,"Bonaire, Sint 
Eustatius and Saba, Netherlands",26221 +184,CK,COK,184,Cook Islands,New Zealand,-21.2367,-159.7777,"Cook Islands, New Zealand",17459 +570,NU,NIU,570,Niue,New Zealand,-19.0544,-169.8672,"Niue, New Zealand",1650 +56601,NG,NGA,566,Abia,Nigeria,5.4527,7.5248,"Abia, Nigeria",3727347 +56602,NG,NGA,566,Adamawa,Nigeria,9.3265,12.3984,"Adamawa, Nigeria",4248436 +56603,NG,NGA,566,Akwa Ibom,Nigeria,4.9057,7.8537,"Akwa Ibom, Nigeria",5482177 +56604,NG,NGA,566,Anambra,Nigeria,6.2209,6.937,"Anambra, Nigeria",5527809 +56605,NG,NGA,566,Bauchi,Nigeria,10.7761,9.9992,"Bauchi, Nigeria",6537314 +56606,NG,NGA,566,Bayelsa,Nigeria,4.7719,6.0699,"Bayelsa, Nigeria",2277961 +56607,NG,NGA,566,Benue,Nigeria,7.3369,8.7404,"Benue, Nigeria",5741815 +56608,NG,NGA,566,Borno,Nigeria,11.8846,13.152,"Borno, Nigeria",5860183 +56609,NG,NGA,566,Cross River,Nigeria,5.8702,8.5988,"Cross River, Nigeria",3866269 +56610,NG,NGA,566,Delta,Nigeria,5.704,5.9339,"Delta, Nigeria",5663362 +56611,NG,NGA,566,Ebonyi,Nigeria,6.2649,8.0137,"Ebonyi, Nigeria",2880383 +56612,NG,NGA,566,Edo,Nigeria,6.6342,5.9304,"Edo, Nigeria",4235595 +56613,NG,NGA,566,Ekiti,Nigeria,7.719,5.311,"Ekiti, Nigeria",3270798 +56614,NG,NGA,566,Enugu,Nigeria,6.5364,7.4356,"Enugu, Nigeria",4411119 +56615,NG,NGA,566,Federal Capital Territory,Nigeria,8.8941,7.186,"Federal Capital Territory, Nigeria",3564126 +56616,NG,NGA,566,Gombe,Nigeria,10.3638,11.1928,"Gombe, Nigeria",3256962 +56617,NG,NGA,566,Imo,Nigeria,5.572,7.0588,"Imo, Nigeria",5408756 +56618,NG,NGA,566,Jigawa,Nigeria,12.228,9.5616,"Jigawa, Nigeria",5828163 +56619,NG,NGA,566,Kaduna,Nigeria,10.3764,7.7095,"Kaduna, Nigeria",8252366 +56620,NG,NGA,566,Kano,Nigeria,11.7471,8.5247,"Kano, Nigeria",13076892 +56621,NG,NGA,566,Katsina,Nigeria,12.3797,7.6306,"Katsina, Nigeria",7831319 +56622,NG,NGA,566,Kebbi,Nigeria,11.4942,4.2333,"Kebbi, Nigeria",4440050 +56623,NG,NGA,566,Kogi,Nigeria,7.7337,6.6906,"Kogi, Nigeria",4473490 +56624,NG,NGA,566,Kwara,Nigeria,8.9669,4.3874,"Kwara, Nigeria",3192893 
+56625,NG,NGA,566,Lagos,Nigeria,6.5236,3.6006,"Lagos, Nigeria",12550598 +56626,NG,NGA,566,Nasarawa,Nigeria,8.4998,8.1997,"Nasarawa, Nigeria",2523395 +56627,NG,NGA,566,Niger,Nigeria,9.9309,5.5983,"Niger, Nigeria",5556247 +56628,NG,NGA,566,Ogun,Nigeria,6.998,3.4737,"Ogun, Nigeria",5217716 +56629,NG,NGA,566,Ondo,Nigeria,6.9149,5.1478,"Ondo, Nigeria",4671695 +56630,NG,NGA,566,Osun,Nigeria,7.5629,4.52,"Osun, Nigeria",4705589 +56631,NG,NGA,566,Oyo,Nigeria,8.1574,3.6147,"Oyo, Nigeria",7840864 +56632,NG,NGA,566,Plateau,Nigeria,9.2182,9.5179,"Plateau, Nigeria",4200442 +56633,NG,NGA,566,Rivers,Nigeria,4.8396,6.9112,"Rivers, Nigeria",7303924 +56634,NG,NGA,566,Sokoto,Nigeria,13.0533,5.3223,"Sokoto, Nigeria",4998090 +56635,NG,NGA,566,Taraba,Nigeria,7.9994,10.774,"Taraba, Nigeria",3066834 +56636,NG,NGA,566,Yobe,Nigeria,12.2939,11.439,"Yobe, Nigeria",3294137 +56637,NG,NGA,566,Zamfara,Nigeria,12.1222,6.2236,"Zamfara, Nigeria",4515427 +58601,PK,PAK,586,Azad Jammu and Kashmir,Pakistan,34.027401,73.947253,"Azad Jammu and Kashmir, Pakistan",4045366 +58602,PK,PAK,586,Balochistan,Pakistan,28.328492,65.898403,"Balochistan, Pakistan",12344408 +58603,PK,PAK,586,Gilgit-Baltistan,Pakistan,35.792146,74.982138,"Gilgit-Baltistan, Pakistan",1013584 +58604,PK,PAK,586,Islamabad,Pakistan,33.665087,73.121219,"Islamabad, Pakistan",2006572 +58605,PK,PAK,586,Khyber Pakhtunkhwa,Pakistan,34.485332,72.09169,"Khyber Pakhtunkhwa, Pakistan",30523371 +58606,PK,PAK,586,Punjab,Pakistan,30.811346,72.139132,"Punjab, Pakistan",110012442 +58607,PK,PAK,586,Sindh,Pakistan,26.009446,68.776807,"Sindh, Pakistan",47886051 +60401,PE,PER,604,Amazonas,Peru,-5.077253,-78.050172,"Amazonas, Peru",426800 +60402,PE,PER,604,Ancash,Peru,-9.407125,-77.671795,"Ancash, Peru",1180600 +60403,PE,PER,604,Apurimac,Peru,-14.027713,-72.975378,"Apurimac, Peru",430700 +60404,PE,PER,604,Arequipa,Peru,-15.843524,-72.475539,"Arequipa, Peru",1497400 +60405,PE,PER,604,Ayacucho,Peru,-14.091648,-74.08344,"Ayacucho, Peru",668200 
+60406,PE,PER,604,Cajamarca,Peru,-6.430284,-78.745596,"Cajamarca, Peru",1453700 +60407,PE,PER,604,Callao,Peru,-11.954609,-77.136042,"Callao, Peru",1129900 +60408,PE,PER,604,Cusco,Peru,-13.191068,-72.153609,"Cusco, Peru",1357100 +60409,PE,PER,604,Huancavelica,Peru,-13.023888,-75.00277,"Huancavelica, Peru",365300 +60410,PE,PER,604,Huanuco,Peru,-9.421676,-76.040642,"Huanuco, Peru",760300 +60411,PE,PER,604,Ica,Peru,-14.235097,-75.574821,"Ica, Peru",975200 +60412,PE,PER,604,Junin,Peru,-11.541783,-74.876968,"Junin, Peru",1361500 +60413,PE,PER,604,La Libertad,Peru,-7.92139,-78.370238,"La Libertad, Peru",2016800 +60414,PE,PER,604,Lambayeque,Peru,-6.353049,-79.824113,"Lambayeque, Peru",1310800 +60415,PE,PER,604,Lima,Peru,-11.766533,-76.604498,"Lima, Peru",10628500 +60416,PE,PER,604,Loreto,Peru,-4.124847,-74.424115,"Loreto, Peru",1027600 +60417,PE,PER,604,Madre de Dios,Peru,-11.972699,-70.53172,"Madre de Dios, Peru",173800 +60418,PE,PER,604,Moquegua,Peru,-16.860271,-70.839046,"Moquegua, Peru",192700 +60419,PE,PER,604,Pasco,Peru,-10.39655,-75.307635,"Pasco, Peru",271900 +60420,PE,PER,604,Piura,Peru,-5.133361,-80.335861,"Piura, Peru",2048000 +60421,PE,PER,604,Puno,Peru,-14.995827,-69.922726,"Puno, Peru",1238000 +60422,PE,PER,604,San Martin,Peru,-7.039531,-76.729127,"San Martin, Peru",899600 +60423,PE,PER,604,Tacna,Peru,-17.644161,-70.27756,"Tacna, Peru",371000 +60424,PE,PER,604,Tumbes,Peru,-3.857496,-80.545255,"Tumbes, Peru",251500 +60425,PE,PER,604,Ucayali,Peru,-9.621718,-73.444929,"Ucayali, Peru",589100 +61601,PL,POL,616,Dolnoslaskie,Poland,51.134,16.8842,"Dolnoslaskie, Poland",2901225 +61602,PL,POL,616,Kujawsko-pomorskie,Poland,53.1648,18.4834,"Kujawsko-pomorskie, Poland",2077775 +61603,PL,POL,616,Lubelskie,Poland,51.2494,23.1011,"Lubelskie, Poland",2117619 +61604,PL,POL,616,Lubuskie,Poland,52.2275,15.2559,"Lubuskie, Poland",1014548 +61605,PL,POL,616,Lodzkie,Poland,51.4635,19.1727,"Lodzkie, Poland",2466322 +61606,PL,POL,616,Malopolskie,Poland,49.7225,20.2503,"Malopolskie, 
Poland",3400577 +61607,PL,POL,616,Mazowieckie,Poland,51.8927,21.0022,"Mazowieckie, Poland",5403412 +61608,PL,POL,616,Opolskie,Poland,50.8004,17.938,"Opolskie, Poland",986506 +61609,PL,POL,616,Podkarpackie,Poland,50.0575,22.0896,"Podkarpackie, Poland",2129015 +61610,PL,POL,616,Podlaskie,Poland,53.0697,22.9675,"Podlaskie, Poland",1181533 +61611,PL,POL,616,Pomorskie,Poland,54.2944,18.1531,"Pomorskie, Poland",2333523 +61612,PL,POL,616,Slaskie,Poland,50.5717,19.322,"Slaskie, Poland",4533565 +61613,PL,POL,616,Swietokrzyskie,Poland,50.6261,20.9406,"Swietokrzyskie, Poland",1241546 +61614,PL,POL,616,Warminsko-mazurskie,Poland,53.8671,20.7028,"Warminsko-mazurskie, Poland",1428983 +61615,PL,POL,616,Wielkopolskie,Poland,52.28,17.3523,"Wielkopolskie, Poland",3493969 +61616,PL,POL,616,Zachodniopomorskie,Poland,53.4658,15.1823,"Zachodniopomorskie, Poland",1701030 +64201,RO,ROU,642,Alba,Romania,46.1559,23.5556,"Alba, Romania",74000 +64202,RO,ROU,642,Arad,Romania,46.176,21.319,"Arad, Romania",409072 +64203,RO,ROU,642,Arges,Romania,45.0723,24.8143,"Arges, Romania",612431 +64204,RO,ROU,642,Bacau,Romania,46.5833,26.9167,"Bacau, Romania",616168 +64205,RO,ROU,642,Bihor,Romania,47.0158,22.1723,"Bihor, Romania",575398 +64206,RO,ROU,642,Bistrita-Nasaud,Romania,47.2486,24.5323,"Bistrita-Nasaud, Romania",277861 +64207,RO,ROU,642,Botosani,Romania,47.745,26.6621,"Botosani, Romania",412626 +64208,RO,ROU,642,Braila,Romania,45.271,27.9743,"Braila, Romania",304925 +64209,RO,ROU,642,Brasov,Romania,45.6667,25.6167,"Brasov, Romania",549217 +64210,RO,ROU,642,Bucuresti,Romania,44.4268,26.1025,"Bucuresti, Romania",1883425 +64211,RO,ROU,642,Buzau,Romania,45.1667,26.8167,"Buzau, Romania",432054 +64212,RO,ROU,642,Calarasi,Romania,44.2085,27.3137,"Calarasi, Romania",285050 +64213,RO,ROU,642,Caras-Severin,Romania,45.114,22.0741,"Caras-Severin, Romania",274277 +64214,RO,ROU,642,Cluj,Romania,46.7667,23.5833,"Cluj, Romania",691106 +64215,RO,ROU,642,Constanta,Romania,44.1773,28.6529,"Constanta, Romania",684082 
+64216,RO,ROU,642,Covasna,Romania,45.8446,26.1687,"Covasna, Romania",210177 +64217,RO,ROU,642,Dambovita,Romania,44.929,25.4254,"Dambovita, Romania",518745 +64218,RO,ROU,642,Dolj,Romania,44.1623,23.6325,"Dolj, Romania",660544 +64219,RO,ROU,642,Galati,Romania,45.4382,28.0563,"Galati, Romania",536167 +64220,RO,ROU,642,Giurgiu,Romania,43.9008,25.9739,"Giurgiu, Romania",265494 +64221,RO,ROU,642,Gorj,Romania,44.9486,23.2427,"Gorj, Romania",334238 +64222,RO,ROU,642,Harghita,Romania,46.4929,25.6457,"Harghita, Romania",304969 +64223,RO,ROU,642,Hunedoara,Romania,45.7697,22.9203,"Hunedoara, Romania",396253 +64224,RO,ROU,642,Ialomita,Romania,44.6031,27.379,"Ialomita, Romania",258669 +64225,RO,ROU,642,Iasi,Romania,47.1598,27.5872,"Iasi, Romania",772348 +64226,RO,ROU,642,Ilfov,Romania,44.5355,26.2325,"Ilfov, Romania",388738 +64227,RO,ROU,642,Maramures,Romania,47.6738,23.7456,"Maramures, Romania",516562 +64228,RO,ROU,642,Mehedinti,Romania,44.5515,22.9044,"Mehedinti, Romania",254570 +64229,RO,ROU,642,Mures,Romania,46.557,24.6723,"Mures, Romania",550846 +64230,RO,ROU,642,Neamt,Romania,46.9759,26.3819,"Neamt, Romania",470766 +64231,RO,ROU,642,Olt,Romania,44.2008,24.5023,"Olt, Romania",415530 +64232,RO,ROU,642,Prahova,Romania,45.0892,26.0829,"Prahova, Romania",762886 +64233,RO,ROU,642,Salaj,Romania,47.2091,23.2122,"Salaj, Romania",224384 +64234,RO,ROU,642,Satu Mare,Romania,47.79,22.89,"Satu Mare, Romania",329079 +64235,RO,ROU,642,Sibiu,Romania,45.7969,24.15,"Sibiu, Romania",375992 +64236,RO,ROU,642,Suceava,Romania,47.6514,26.2556,"Suceava, Romania",634810 +64237,RO,ROU,642,Teleorman,Romania,44.016,25.2987,"Teleorman, Romania",360178 +64238,RO,ROU,642,Timis,Romania,45.8139,21.3331,"Timis, Romania",683540 +64239,RO,ROU,642,Tulcea,Romania,45.1767,28.8052,"Tulcea, Romania",201462 +64240,RO,ROU,642,Valcea,Romania,45.0798,24.0835,"Valcea, Romania",355320 +64241,RO,ROU,642,Vaslui,Romania,46.6381,27.7288,"Vaslui, Romania",395500 +64242,RO,ROU,642,Vrancea,Romania,45.8135,27.0658,"Vrancea, 
Romania",340310 +64301,RU,RUS,643,Adygea Republic,Russia,44.6939006,40.1520421,"Adygea Republic, Russia",453376 +64302,RU,RUS,643,Altai Krai,Russia,52.6932243,82.6931424,"Altai Krai, Russia",2350080 +64303,RU,RUS,643,Altai Republic,Russia,50.7114101,86.8572186,"Altai Republic, Russia",218063 +64304,RU,RUS,643,Amur Oblast,Russia,52.8032368,128.437295,"Amur Oblast, Russia",798424 +64305,RU,RUS,643,Arkhangelsk Oblast,Russia,63.5589686,43.1221646,"Arkhangelsk Oblast, Russia",1155028 +64306,RU,RUS,643,Astrakhan Oblast,Russia,47.1878186,47.608851,"Astrakhan Oblast, Russia",1017514 +64307,RU,RUS,643,Bashkortostan Republic,Russia,54.8573563,57.1439682,"Bashkortostan Republic, Russia",4063293 +64308,RU,RUS,643,Belgorod Oblast,Russia,50.7080119,37.5837615,"Belgorod Oblast, Russia",1549876 +64309,RU,RUS,643,Bryansk Oblast,Russia,52.8873315,33.415853,"Bryansk Oblast, Russia",1210982 +64310,RU,RUS,643,Buryatia Republic,Russia,52.7182426,109.492143,"Buryatia Republic, Russia",984511 +64311,RU,RUS,643,Chechen Republic,Russia,43.3976147,45.6985005,"Chechen Republic, Russia",1436981 +64312,RU,RUS,643,Chelyabinsk Oblast,Russia,54.4223954,61.1865846,"Chelyabinsk Oblast, Russia",3493036 +64313,RU,RUS,643,Chukotka Autonomous Okrug,Russia,66.0006475,169.4900869,"Chukotka Autonomous Okrug, Russia",49348 +64314,RU,RUS,643,Chuvashia Republic,Russia,55.4259922,47.0849429,"Chuvashia Republic, Russia",1231117 +64315,RU,RUS,643,Dagestan Republic,Russia,43.0574916,47.1332224,"Dagestan Republic, Russia",3063885 +64316,RU,RUS,643,Ingushetia Republic,Russia,43.11542075,45.01713552,"Ingushetia Republic, Russia",488043 +64317,RU,RUS,643,Irkutsk Oblast,Russia,56.6370122,104.719221,"Irkutsk Oblast, Russia",2404195 +64318,RU,RUS,643,Ivanovo Oblast,Russia,56.9167446,41.4352137,"Ivanovo Oblast, Russia",1014646 +64319,RU,RUS,643,Jewish Autonomous Okrug,Russia,48.57527615,132.6630746,"Jewish Autonomous Okrug, Russia",162014 +64320,RU,RUS,643,Kabardino-Balkarian 
Republic,Russia,43.4806048,43.5978976,"Kabardino-Balkarian Republic, Russia",865828 +64321,RU,RUS,643,Kaliningrad Oblast,Russia,54.7293041,21.1489473,"Kaliningrad Oblast, Russia",994599 +64322,RU,RUS,643,Kalmykia Republic,Russia,46.2313018,45.3275745,"Kalmykia Republic, Russia",275413 +64323,RU,RUS,643,Kaluga Oblast,Russia,54.4382773,35.5272854,"Kaluga Oblast, Russia",1012056 +64324,RU,RUS,643,Kamchatka Krai,Russia,57.1914882,160.0383819,"Kamchatka Krai, Russia",315557 +64325,RU,RUS,643,Karachay-Cherkess Republic,Russia,43.7368326,41.7267991,"Karachay-Cherkess Republic, Russia",466305 +64326,RU,RUS,643,Karelia Republic,Russia,62.6194031,33.4920267,"Karelia Republic, Russia",622484 +64327,RU,RUS,643,Kemerovo Oblast,Russia,54.5335781,87.342861,"Kemerovo Oblast, Russia",2694877 +64328,RU,RUS,643,Khabarovsk Krai,Russia,51.6312684,136.121524,"Khabarovsk Krai, Russia",1328302 +64329,RU,RUS,643,Khakassia Republic,Russia,53.72258845,91.44293627,"Khakassia Republic, Russia",537513 +64330,RU,RUS,643,Khanty-Mansi Autonomous Okrug,Russia,61.0259025,69.0982628,"Khanty-Mansi Autonomous Okrug, Russia",1532243 +64331,RU,RUS,643,Kirov Oblast,Russia,57.9665589,49.4074599,"Kirov Oblast, Russia",1283238 +64332,RU,RUS,643,Komi Republic,Russia,63.9881421,54.3326073,"Komi Republic, Russia",840873 +64333,RU,RUS,643,Kostroma Oblast,Russia,58.424756,44.2533273,"Kostroma Oblast, Russia",643324 +64334,RU,RUS,643,Krasnodar Krai,Russia,45.7684014,39.0261044,"Krasnodar Krai, Russia",5603420 +64335,RU,RUS,643,Krasnoyarsk Krai,Russia,63.3233807,97.0979974,"Krasnoyarsk Krai, Russia",2876497 +64336,RU,RUS,643,Kurgan Oblast,Russia,55.7655302,64.5632681,"Kurgan Oblast, Russia",845537 +64337,RU,RUS,643,Kursk Oblast,Russia,51.6568453,36.4852695,"Kursk Oblast, Russia",1115237 +64338,RU,RUS,643,Leningrad Oblast,Russia,60.1853296,32.3925325,"Leningrad Oblast, Russia",1813816 +64339,RU,RUS,643,Lipetsk Oblast,Russia,52.6935178,39.1122664,"Lipetsk Oblast, Russia",1150201 +64340,RU,RUS,643,Magadan 
Oblast,Russia,62.48858785,153.9903764,"Magadan Oblast, Russia",144091 +64341,RU,RUS,643,Mari El Republic,Russia,56.5767504,47.8817512,"Mari El Republic, Russia",682333 +64342,RU,RUS,643,Mordovia Republic,Russia,54.4419829,44.4661144,"Mordovia Republic, Russia",805056 +64343,RU,RUS,643,Moscow,Russia,55.7504461,37.6174943,"Moscow, Russia",12506468 +64344,RU,RUS,643,Moscow Oblast,Russia,55.5043158,38.0353929,"Moscow Oblast, Russia",7503385 +64345,RU,RUS,643,Murmansk Oblast,Russia,68.0000418,33.9999151,"Murmansk Oblast, Russia",753557 +64346,RU,RUS,643,Nenets Autonomous Okrug,Russia,68.27557185,57.1686375,"Nenets Autonomous Okrug, Russia",43997 +64347,RU,RUS,643,Nizhny Novgorod Oblast,Russia,55.4718033,44.0911594,"Nizhny Novgorod Oblast, Russia",3234752 +64348,RU,RUS,643,North Ossetia - Alania Republic,Russia,42.7933611,44.6324493,"North Ossetia - Alania Republic, Russia",701765 +64349,RU,RUS,643,Novgorod Oblast,Russia,58.2843833,32.5169757,"Novgorod Oblast, Russia",606476 +64350,RU,RUS,643,Novosibirsk Oblast,Russia,54.9720169,79.4813924,"Novosibirsk Oblast, Russia",2788849 +64351,RU,RUS,643,Omsk Oblast,Russia,56.0935263,73.5099936,"Omsk Oblast, Russia",1960081 +64352,RU,RUS,643,Orel Oblast,Russia,52.9685433,36.0692477,"Orel Oblast, Russia",747247 +64353,RU,RUS,643,Orenburg Oblast,Russia,52.0269262,54.7276647,"Orenburg Oblast, Russia",1977720 +64354,RU,RUS,643,Penza Oblast,Russia,53.1655415,44.7879181,"Penza Oblast, Russia",1331655 +64355,RU,RUS,643,Perm Krai,Russia,58.5951603,56.3159546,"Perm Krai, Russia",2623122 +64356,RU,RUS,643,Primorsky Krai,Russia,45.0819456,134.726645,"Primorsky Krai, Russia",1913037 +64357,RU,RUS,643,Pskov Oblast,Russia,57.5358729,28.8586826,"Pskov Oblast, Russia",636546 +64358,RU,RUS,643,Rostov Oblast,Russia,47.6222451,40.7957942,"Rostov Oblast, Russia",4220452 +64359,RU,RUS,643,Ryazan Oblast,Russia,54.4226732,40.5705246,"Ryazan Oblast, Russia",1121474 +64360,RU,RUS,643,Saint Petersburg,Russia,59.9606739,30.1586551,"Saint Petersburg, 
Russia",5351935 +64361,RU,RUS,643,Sakha (Yakutiya) Republic,Russia,66.941626,129.642371,"Sakha (Yakutiya) Republic, Russia",964330 +64362,RU,RUS,643,Sakhalin Oblast,Russia,49.7219665,143.448533,"Sakhalin Oblast, Russia",490181 +64363,RU,RUS,643,Samara Oblast,Russia,53.2128813,50.8914633,"Samara Oblast, Russia",3193514 +64364,RU,RUS,643,Saratov Oblast,Russia,51.6520555,46.8631952,"Saratov Oblast, Russia",2462950 +64365,RU,RUS,643,Smolensk Oblast,Russia,55.0343496,33.0192065,"Smolensk Oblast, Russia",949348 +64366,RU,RUS,643,Stavropol Krai,Russia,44.8632577,43.4406913,"Stavropol Krai, Russia",2800674 +64367,RU,RUS,643,Sverdlovsk Oblast,Russia,58.6414755,61.8021546,"Sverdlovsk Oblast, Russia",4325256 +64368,RU,RUS,643,Tambov Oblast,Russia,52.9019574,41.3578918,"Tambov Oblast, Russia",1033552 +64369,RU,RUS,643,Tatarstan Republic,Russia,55.7648572,52.43104273,"Tatarstan Republic, Russia",3894284 +64370,RU,RUS,643,Tomsk Oblast,Russia,58.6124279,82.0475315,"Tomsk Oblast, Russia",1078280 +64371,RU,RUS,643,Tula Oblast,Russia,53.9570701,37.3690909,"Tula Oblast, Russia",1491855 +64372,RU,RUS,643,Tver Oblast,Russia,57.1134475,35.1744428,"Tver Oblast, Russia",1283873 +64373,RU,RUS,643,Tyumen Oblast,Russia,58.8206488,70.3658837,"Tyumen Oblast, Russia",3692400 +64374,RU,RUS,643,Tyva Republic,Russia,51.4017149,93.8582593,"Tyva Republic, Russia",321722 +64375,RU,RUS,643,Udmurt Republic,Russia,57.1961165,52.6959832,"Udmurt Republic, Russia",1513044 +64376,RU,RUS,643,Ulyanovsk Oblast,Russia,54.1463177,47.2324921,"Ulyanovsk Oblast, Russia",1246618 +64377,RU,RUS,643,Vladimir Oblast,Russia,56.0503336,40.6561633,"Vladimir Oblast, Russia",1378337 +64378,RU,RUS,643,Volgograd Oblast,Russia,49.6048339,44.2903582,"Volgograd Oblast, Russia",2521276 +64379,RU,RUS,643,Vologda Oblast,Russia,60.0391461,43.1215213,"Vologda Oblast, Russia",1176689 +64380,RU,RUS,643,Voronezh Oblast,Russia,50.9800393,40.1506507,"Voronezh Oblast, Russia",2333768 +64381,RU,RUS,643,Yamalo-Nenets Autonomous 
Okrug,Russia,67.1471631,74.3415488,"Yamalo-Nenets Autonomous Okrug, Russia",538547 +64382,RU,RUS,643,Yaroslavl Oblast,Russia,57.7781976,39.0021095,"Yaroslavl Oblast, Russia",1265684 +64383,RU,RUS,643,Zabaykalsky Krai,Russia,52.248521,115.956325,"Zabaykalsky Krai, Russia",1072806 +70301,SK,SVK,703,Banska Bystrica,Slovakia,48.7363,19.1462,"Banska Bystrica, Slovakia",657119 +70302,SK,SVK,703,Bratislava,Slovakia,48.1486,17.107,"Bratislava, Slovakia",603699 +70303,SK,SVK,703,Kosice,Slovakia,48.7164,21.2611,"Kosice, Slovakia",771947 +70304,SK,SVK,703,Nitra,Slovakia,48.3061,18.0764,"Nitra, Slovakia",708498 +70305,SK,SVK,703,Presov,Slovakia,49.0018,21.2393,"Presov, Slovakia",798596 +70306,SK,SVK,703,Trencin,Slovakia,48.8849,18.0335,"Trencin, Slovakia",600386 +70307,SK,SVK,703,Trnava,Slovakia,48.3709,17.5833,"Trnava, Slovakia",554172 +70308,SK,SVK,703,Zilina,Slovakia,49.2194,18.7408,"Zilina, Slovakia",694763 +72401,ES,ESP,724,Andalusia,Spain,37.5443,-4.7278,"Andalusia, Spain",8427405 +72402,ES,ESP,724,Aragon,Spain,41.5976,-0.9057,"Aragon, Spain",1320586 +72403,ES,ESP,724,Asturias,Spain,43.3614,-5.8593,"Asturias, Spain",1022205 +72404,ES,ESP,724,Baleares,Spain,39.710358,2.995148,"Baleares, Spain",1188220 +72405,ES,ESP,724,Canarias,Spain,28.2916,-16.6291,"Canarias, Spain",2206901 +72406,ES,ESP,724,Cantabria,Spain,43.1828,-3.9878,"Cantabria, Spain",581641 +72407,ES,ESP,724,Castilla - La Mancha,Spain,39.2796,-3.0977,"Castilla - La Mancha, Spain",2034877 +72408,ES,ESP,724,Castilla y Leon,Spain,41.8357,-4.3976,"Castilla y Leon, Spain",2407733 +72409,ES,ESP,724,Catalonia,Spain,41.5912,1.5209,"Catalonia, Spain",7566431 +72410,ES,ESP,724,Ceuta,Spain,35.8894,-5.3213,"Ceuta, Spain",84829 +72411,ES,ESP,724,C. Valenciana,Spain,39.484,-0.7533,"C. 
Valenciana, Spain",4974969 +72412,ES,ESP,724,Extremadura,Spain,39.4937,-6.0679,"Extremadura, Spain",1065424 +72413,ES,ESP,724,Galicia,Spain,42.5751,-8.1339,"Galicia, Spain",2700441 +72414,ES,ESP,724,Madrid,Spain,40.4168,-3.7038,"Madrid, Spain",6641649 +72415,ES,ESP,724,Melilla,Spain,35.2923,-2.9381,"Melilla, Spain",84689 +72416,ES,ESP,724,Murcia,Spain,37.9922,-1.1307,"Murcia, Spain",1487663 +72417,ES,ESP,724,Navarra,Spain,42.6954,-1.6761,"Navarra, Spain",649946 +72418,ES,ESP,724,Pais Vasco,Spain,42.9896,-2.6189,"Pais Vasco, Spain",2177880 +72419,ES,ESP,724,La Rioja,Spain,42.2871,-2.5396,"La Rioja, Spain",313571 +75201,SE,SWE,752,Blekinge,Sweden,56.2784,15.018,"Blekinge, Sweden",159606 +75202,SE,SWE,752,Dalarna,Sweden,61.0917,14.6664,"Dalarna, Sweden",287966 +75203,SE,SWE,752,Gavleborg,Sweden,61.3012,16.1534,"Gavleborg, Sweden",287382 +75204,SE,SWE,752,Gotland,Sweden,57.4684,18.4867,"Gotland, Sweden",59686 +75205,SE,SWE,752,Halland,Sweden,56.8967,12.8034,"Halland, Sweden",333848 +75206,SE,SWE,752,Jamtland Harjedalen,Sweden,63.1712,14.9592,"Jamtland Harjedalen, Sweden",130810 +75207,SE,SWE,752,Jonkoping,Sweden,57.3708,14.3439,"Jonkoping, Sweden",363599 +75208,SE,SWE,752,Kalmar,Sweden,57.235,16.1849,"Kalmar, Sweden",245446 +75209,SE,SWE,752,Kronoberg,Sweden,56.7183,14.4115,"Kronoberg, Sweden",201469 +75210,SE,SWE,752,Norrbotten,Sweden,66.8309,20.3992,"Norrbotten, Sweden",250093 +75211,SE,SWE,752,Orebro,Sweden,59.535,15.0066,"Orebro, Sweden",304805 +75212,SE,SWE,752,Ostergotland,Sweden,58.3454,15.5198,"Ostergotland, Sweden",465495 +75213,SE,SWE,752,Skane,Sweden,55.9903,13.5958,"Skane, Sweden",1377827 +75214,SE,SWE,752,Sormland,Sweden,59.0336,16.7519,"Sormland, Sweden",297540 +75215,SE,SWE,752,Stockholm,Sweden,59.6025,18.1384,"Stockholm, Sweden",2377081 +75216,SE,SWE,752,Uppsala,Sweden,60.0092,17.2715,"Uppsala, Sweden",383713 +75217,SE,SWE,752,Varmland,Sweden,59.7294,13.2354,"Varmland, Sweden",282414 +75218,SE,SWE,752,Vasterbotten,Sweden,65.3337,16.5162,"Vasterbotten, 
Sweden",271736 +75219,SE,SWE,752,Vasternorrland,Sweden,63.4276,17.7292,"Vasternorrland, Sweden",245347 +75220,SE,SWE,752,Vastmanland,Sweden,59.6714,16.2159,"Vastmanland, Sweden",275845 +75221,SE,SWE,752,Vastra Gotaland,Sweden,58.2528,13.0596,"Vastra Gotaland, Sweden",1725881 +80401,UA,UKR,804,Cherkasy Oblast,Ukraine,49.4444,32.0598,"Cherkasy Oblast, Ukraine",1206351 +80402,UA,UKR,804,Chernihiv Oblast,Ukraine,51.4982,31.2893,"Chernihiv Oblast, Ukraine",1005745 +80403,UA,UKR,804,Chernivtsi Oblast,Ukraine,48.2917,25.9352,"Chernivtsi Oblast, Ukraine",904374 +80404,UA,UKR,804,Crimea Republic*,Ukraine,45.2835,34.2008,"Crimea Republic*, Ukraine",1913731 +80405,UA,UKR,804,Dnipropetrovsk Oblast,Ukraine,48.4647,35.0462,"Dnipropetrovsk Oblast, Ukraine",3206477 +80406,UA,UKR,804,Donetsk Oblast,Ukraine,48.0159,37.8028,"Donetsk Oblast, Ukraine",4165901 +80407,UA,UKR,804,Ivano-Frankivsk Oblast,Ukraine,48.9226,24.7111,"Ivano-Frankivsk Oblast, Ukraine",1373252 +80408,UA,UKR,804,Kharkiv Oblast,Ukraine,49.9935,36.2304,"Kharkiv Oblast, Ukraine",2675598 +80409,UA,UKR,804,Kherson Oblast,Ukraine,46.6354,32.6169,"Kherson Oblast, Ukraine",1037640 +80410,UA,UKR,804,Khmelnytskyi Oblast,Ukraine,49.423,26.9871,"Khmelnytskyi Oblast, Ukraine",1264705 +80411,UA,UKR,804,Kiev,Ukraine,50.4501,30.5234,"Kiev, Ukraine",2950800 +80412,UA,UKR,804,Kiev Oblast,Ukraine,50.053,30.7667,"Kiev Oblast, Ukraine",1767940 +80413,UA,UKR,804,Kirovohrad Oblast,Ukraine,48.5079,32.2623,"Kirovohrad Oblast, Ukraine",945549 +80414,UA,UKR,804,Luhansk Oblast,Ukraine,48.574,39.3078,"Luhansk Oblast, Ukraine",2151833 +80415,UA,UKR,804,Lviv Oblast,Ukraine,49.8397,24.0297,"Lviv Oblast, Ukraine",2522021 +80416,UA,UKR,804,Mykolaiv Oblast,Ukraine,46.975,31.9946,"Mykolaiv Oblast, Ukraine",2522021 +80417,UA,UKR,804,Odessa Oblast,Ukraine,46.4846,30.7326,"Odessa Oblast, Ukraine",2380308 +80418,UA,UKR,804,Poltava Oblast,Ukraine,49.5883,34.5514,"Poltava Oblast, Ukraine",1400439 +80419,UA,UKR,804,Rivne Oblast,Ukraine,50.6199,26.2516,"Rivne 
Oblast, Ukraine",1157301 +80420,UA,UKR,804,Sevastopol*,Ukraine,44.6054,33.522,"Sevastopol*, Ukraine",443211 +80421,UA,UKR,804,Sumy Oblast,Ukraine,50.9077,34.7981,"Sumy Oblast, Ukraine",1081418 +80422,UA,UKR,804,Ternopil Oblast,Ukraine,49.5535,25.5948,"Ternopil Oblast, Ukraine",1045879 +80423,UA,UKR,804,Vinnytsia Oblast,Ukraine,49.2331,28.4682,"Vinnytsia Oblast, Ukraine",1560394 +80424,UA,UKR,804,Volyn Oblast,Ukraine,50.7472,25.3254,"Volyn Oblast, Ukraine",1035330 +80425,UA,UKR,804,Zakarpattia Oblast,Ukraine,48.6208,22.2879,"Zakarpattia Oblast, Ukraine",1256802 +80426,UA,UKR,804,Zaporizhia Oblast,Ukraine,47.8388,35.1396,"Zaporizhia Oblast, Ukraine",1705836 +80427,UA,UKR,804,Zhytomyr Oblast,Ukraine,50.2547,28.6587,"Zhytomyr Oblast, Ukraine",1220193 +82601,GB,GBR,826,England,United Kingdom,52.3555,-1.1743,"England, United Kingdom",55977200 +82602,GB,GBR,826,Northern Ireland,United Kingdom,54.7877,-6.4923,"Northern Ireland, United Kingdom",1881600 +82603,GB,GBR,826,Scotland,United Kingdom,56.4907,-4.2026,"Scotland, United Kingdom",5463300 +82604,GB,GBR,826,Wales,United Kingdom,52.1307,-3.7837,"Wales, United Kingdom",3138600 +60,BM,BMU,60,Bermuda,United Kingdom,32.3078,-64.7505,"Bermuda, United Kingdom",62273 +92,VG,VGB,92,British Virgin Islands,United Kingdom,18.4207,-64.64,"British Virgin Islands, United Kingdom",30237 +136,KY,CYM,136,Cayman Islands,United Kingdom,19.3133,-81.2546,"Cayman Islands, United Kingdom",65720 +8261,GB,GBR,826,Channel Islands,United Kingdom,49.3723,-2.3644,"Channel Islands, United Kingdom",170499 +831,GG,GGY,831,Guernsey,United Kingdom,49.448196,-2.58949,"Guernsey, United Kingdom",63000 +832,JE,JEY,832,Jersey,United Kingdom,49.2138,-2.1358,"Jersey, United Kingdom",109300 +238,FK,FLK,238,Falkland Islands (Malvinas),United Kingdom,-51.7963,-59.5236,"Falkland Islands (Malvinas), United Kingdom",3483 +292,GI,GIB,292,Gibraltar,United Kingdom,36.1408,-5.3536,"Gibraltar, United Kingdom",33691 +833,IM,IMN,833,Isle of Man,United 
Kingdom,54.2361,-4.5481,"Isle of Man, United Kingdom",85032 +500,MS,MSR,500,Montserrat,United Kingdom,16.742498,-62.187366,"Montserrat, United Kingdom",4999 +796,TC,TCA,796,Turks and Caicos Islands,United Kingdom,21.694,-71.7979,"Turks and Caicos Islands, United Kingdom",38718 +612,PN,PCN,612,Pitcairn Islands,United Kingdom,-24.3768,-128.3242,"Pitcairn Islands, United Kingdom",67 +660,AI,AIA,660,Anguilla,United Kingdom,18.2206,-63.0686,"Anguilla, United Kingdom",15002 +654,SH,SHN,654,"Saint Helena, Ascension and Tristan da Cunha",United Kingdom,-7.9467,-14.3559,"Saint Helena, Ascension and Tristan da Cunha, United Kingdom",5661 +3601,AU,AUS,36,Australian Capital Territory,Australia,-35.4735,149.0124,"Australian Capital Territory, Australia",428100 +3602,AU,AUS,36,New South Wales,Australia,-33.8688,151.2093,"New South Wales, Australia",8118000 +3603,AU,AUS,36,Northern Territory,Australia,-12.4634,130.8456,"Northern Territory, Australia",245600 +3604,AU,AUS,36,Queensland,Australia,-27.4698,153.0251,"Queensland, Australia",5115500 +3605,AU,AUS,36,South Australia,Australia,-34.9285,138.6007,"South Australia, Australia",1756500 +3606,AU,AUS,36,Tasmania,Australia,-42.8821,147.3272,"Tasmania, Australia",535500 +3607,AU,AUS,36,Victoria,Australia,-37.8136,144.9631,"Victoria, Australia",6629900 +3608,AU,AUS,36,Western Australia,Australia,-31.9505,115.8605,"Western Australia, Australia",2630600 +12401,CA,CAN,124,Alberta,Canada,53.9333,-116.5765,"Alberta, Canada",4442879 +12402,CA,CAN,124,British Columbia,Canada,53.7267,-127.6476,"British Columbia, Canada",5214805 +12403,CA,CAN,124,Manitoba,Canada,53.7609,-98.8139,"Manitoba, Canada",1383765 +12404,CA,CAN,124,New Brunswick,Canada,46.5653,-66.4619,"New Brunswick, Canada",789225 +12405,CA,CAN,124,Newfoundland and Labrador,Canada,53.1355,-57.6604,"Newfoundland and Labrador, Canada",520553 +12406,CA,CAN,124,Northwest Territories,Canada,64.8255,-124.8457,"Northwest Territories,Canada",45504 +12407,CA,CAN,124,Nova 
Scotia,Canada,44.682,-63.7443,"Nova Scotia, Canada",992055 +12408,CA,CAN,124,Ontario,Canada,51.2538,-85.3232,"Ontario, Canada",14826276 +12409,CA,CAN,124,Prince Edward Island,Canada,46.5107,-63.4168,"Prince Edward Island, Canada",164318 +12410,CA,CAN,124,Quebec,Canada,52.9399,-73.5491,"Quebec, Canada",8604495 +12411,CA,CAN,124,Saskatchewan,Canada,52.9399,-106.4509,"Saskatchewan, Canada",1179844 +12412,CA,CAN,124,Yukon,Canada,64.2823,-135,"Yukon, Canada",42986 +12416,CA,CAN,124,Nunavut,Canada,70.2998,-83.1076,"Nunavut, Canada",39403 +15601,CN,CHN,156,Anhui,China,31.8257,117.2264,"Anhui, China",61027171 +15602,CN,CHN,156,Beijing,China,40.1824,116.4142,"Beijing, China",21893095 +15603,CN,CHN,156,Chongqing,China,30.0572,107.874,"Chongqing, China",32054159 +15604,CN,CHN,156,Fujian,China,26.0789,117.9874,"Fujian, China",41540086 +15605,CN,CHN,156,Gansu,China,35.7518,104.2861,"Gansu, China",25019831 +15606,CN,CHN,156,Guangdong,China,23.3417,113.4244,"Guangdong, China",126012510 +15607,CN,CHN,156,Guangxi,China,23.8298,108.7881,"Guangxi, China",50126804 +15608,CN,CHN,156,Guizhou,China,26.8154,106.8748,"Guizhou, China",38562148 +15609,CN,CHN,156,Hainan,China,19.1959,109.7453,"Hainan, China",10081232 +15610,CN,CHN,156,Hebei,China,37.8957,114.9042,"Hebei, China",74610235 +15611,CN,CHN,156,Heilongjiang,China,47.862,127.7615,"Heilongjiang, China",31850088 +15612,CN,CHN,156,Henan,China,33.882,113.614,"Henan, China",99365519 +15613,CN,CHN,156,Hubei,China,30.9756,112.2707,"Hubei, China",57752557 +15614,CN,CHN,156,Hunan,China,27.6104,111.7088,"Hunan, China",66444864 +15615,CN,CHN,156,Inner Mongolia,China,44.0935,113.9448,"Inner Mongolia, China",24049155 +15616,CN,CHN,156,Jiangsu,China,32.9711,119.455,"Jiangsu, China",84748016 +15617,CN,CHN,156,Jiangxi,China,27.614,115.7221,"Jiangxi, China",45188635 +15618,CN,CHN,156,Jilin,China,43.6661,126.1923,"Jilin, China",24073453 +15619,CN,CHN,156,Liaoning,China,41.2956,122.6085,"Liaoning, China",42591407 
+15620,CN,CHN,156,Ningxia,China,37.2692,106.1655,"Ningxia, China",7202654 +15621,CN,CHN,156,Qinghai,China,35.7452,95.9956,"Qinghai, China",5923957 +15622,CN,CHN,156,Shaanxi,China,35.1917,108.8701,"Shaanxi, China",39528999 +15623,CN,CHN,156,Shandong,China,36.3427,118.1498,"Shandong, China",101527453 +15624,CN,CHN,156,Shanghai,China,31.202,121.4491,"Shanghai, China",24870895 +15625,CN,CHN,156,Shanxi,China,37.5777,112.2922,"Shanxi, China",34915616 +15626,CN,CHN,156,Sichuan,China,30.6171,102.7103,"Sichuan, China",83674866 +15627,CN,CHN,156,Tianjin,China,39.3054,117.323,"Tianjin, China",13866009 +15628,CN,CHN,156,Tibet,China,31.6927,88.0924,"Tibet, China",3648100 +15629,CN,CHN,156,Xinjiang,China,41.1129,85.2401,"Xinjiang, China",25852345 +15630,CN,CHN,156,Yunnan,China,24.974,101.487,"Yunnan, China",47209277 +15631,CN,CHN,156,Zhejiang,China,29.1832,120.0934,"Zhejiang, China",64567588 +344,HK,HKG,344,Hong Kong,China,22.3,114.2,"Hong Kong, China",7496988 +446,MO,MAC,446,Macau,China,22.1667,113.55,"Macau, China",649342 +16,AS,ASM,16,American Samoa,US,-14.271,-170.132,"American Samoa, US",55641 +316,GU,GUM,316,Guam,US,13.4443,144.7937,"Guam, US",164229 +580,MP,MNP,580,Northern Mariana Islands,US,15.0979,145.6739,"Northern Mariana Islands, US",55144 +850,VI,VIR,850,Virgin Islands,US,18.3358,-64.8963,"Virgin Islands, US",107268 +630,PR,PRI,630,Puerto Rico,US,18.2208,-66.5901,"Puerto Rico, US",3193694 +84000001,US,USA,840,Alabama,US,32.3182,-86.9023,"Alabama, US",4903185 +84000002,US,USA,840,Alaska,US,61.3707,-152.4044,"Alaska, US",731545 +84000004,US,USA,840,Arizona,US,33.7298,-111.4312,"Arizona, US",7278717 +84000005,US,USA,840,Arkansas,US,34.9697,-92.3731,"Arkansas, US",3017804 +84000006,US,USA,840,California,US,36.1162,-119.6816,"California, US",39512223 +84000008,US,USA,840,Colorado,US,39.0598,-105.3111,"Colorado, US",5758736 +84000009,US,USA,840,Connecticut,US,41.5978,-72.7554,"Connecticut, US",3565287 +84000010,US,USA,840,Delaware,US,39.3185,-75.5071,"Delaware, 
US",973764 +84000011,US,USA,840,District of Columbia,US,38.8974,-77.0268,"District of Columbia, US",705749 +84000012,US,USA,840,Florida,US,27.7663,-81.6868,"Florida, US",21477737 +84000013,US,USA,840,Georgia,US,33.0406,-83.6431,"Georgia, US",10617423 +84000015,US,USA,840,Hawaii,US,21.0943,-157.4983,"Hawaii, US",1415872 +84000016,US,USA,840,Idaho,US,44.2405,-114.4788,"Idaho, US",1787065 +84000017,US,USA,840,Illinois,US,40.3495,-88.9861,"Illinois, US",12671821 +84000018,US,USA,840,Indiana,US,39.8494,-86.2583,"Indiana, US",6732219 +84000019,US,USA,840,Iowa,US,42.0115,-93.2105,"Iowa, US",3155070 +84000020,US,USA,840,Kansas,US,38.5266,-96.7265,"Kansas, US",2913314 +84000021,US,USA,840,Kentucky,US,37.6681,-84.6701,"Kentucky, US",4467673 +84000022,US,USA,840,Louisiana,US,31.1695,-91.8678,"Louisiana, US",4648794 +84000023,US,USA,840,Maine,US,44.6939,-69.3819,"Maine, US",1344212 +84000024,US,USA,840,Maryland,US,39.0639,-76.8021,"Maryland, US",6045680 +84000025,US,USA,840,Massachusetts,US,42.2302,-71.5301,"Massachusetts, US",6892503 +84000026,US,USA,840,Michigan,US,43.3266,-84.5361,"Michigan, US",9986857 +84000027,US,USA,840,Minnesota,US,45.6945,-93.9002,"Minnesota, US",5639632 +84000028,US,USA,840,Mississippi,US,32.7416,-89.6787,"Mississippi, US",2976149 +84000029,US,USA,840,Missouri,US,38.4561,-92.2884,"Missouri, US",6137428 +84000030,US,USA,840,Montana,US,46.9219,-110.4544,"Montana, US",1068778 +84000031,US,USA,840,Nebraska,US,41.1254,-98.2681,"Nebraska, US",1934408 +84000032,US,USA,840,Nevada,US,38.3135,-117.0554,"Nevada, US",3080156 +84000033,US,USA,840,New Hampshire,US,43.4525,-71.5639,"New Hampshire, US",1359711 +84000034,US,USA,840,New Jersey,US,40.2989,-74.521,"New Jersey, US",8882190 +84000035,US,USA,840,New Mexico,US,34.8405,-106.2485,"New Mexico, US",2096829 +84000036,US,USA,840,New York,US,42.1657,-74.9481,"New York, US",19453561 +84000037,US,USA,840,North Carolina,US,35.6301,-79.8064,"North Carolina, US",10488084 +84000038,US,USA,840,North 
Dakota,US,47.5289,-99.784,"North Dakota, US",762062 +84000039,US,USA,840,Ohio,US,40.3888,-82.7649,"Ohio, US",11689100 +84000040,US,USA,840,Oklahoma,US,35.5653,-96.9289,"Oklahoma, US",3956971 +84000041,US,USA,840,Oregon,US,44.572,-122.0709,"Oregon, US",4217737 +84000042,US,USA,840,Pennsylvania,US,40.5908,-77.2098,"Pennsylvania, US",12801989 +84000044,US,USA,840,Rhode Island,US,41.6809,-71.5118,"Rhode Island, US",1059361 +84000045,US,USA,840,South Carolina,US,33.8569,-80.945,"South Carolina, US",5148714 +84000046,US,USA,840,South Dakota,US,44.2998,-99.4388,"South Dakota, US",884659 +84000047,US,USA,840,Tennessee,US,35.7478,-86.6923,"Tennessee, US",6829174 +84000048,US,USA,840,Texas,US,31.0545,-97.5635,"Texas, US",28995881 +84000049,US,USA,840,Utah,US,40.15,-111.8624,"Utah, US",3205958 +84000050,US,USA,840,Vermont,US,44.0459,-72.7107,"Vermont, US",623989 +84000051,US,USA,840,Virginia,US,37.7693,-78.17,"Virginia, US",8535519 +84000053,US,USA,840,Washington,US,47.4009,-121.4905,"Washington, US",7614893 +84000054,US,USA,840,West Virginia,US,38.4912,-80.9545,"West Virginia, US",1792147 +84000055,US,USA,840,Wisconsin,US,44.2685,-89.6165,"Wisconsin, US",5822434 +84000056,US,USA,840,Wyoming,US,42.756,-107.3025,"Wyoming, US",578759 diff --git a/python/samples/learn_resources/resources/PopulationByCountry.csv b/python/samples/learn_resources/resources/PopulationByCountry.csv new file mode 100644 index 000000000000..b3dcae49eb73 --- /dev/null +++ b/python/samples/learn_resources/resources/PopulationByCountry.csv @@ -0,0 +1,199 @@ +UID,iso2,iso3,code3,Country_Region,Lat,Long,Population +4,AF,AFG,4,Afghanistan,33.93911,67.709953,38928341 +8,AL,ALB,8,Albania,41.1533,20.1683,2877800 +10,AQ,ATA,10,Antarctica,-71.9499,23.347,0 +12,DZ,DZA,12,Algeria,28.0339,1.6596,43851043 +20,AD,AND,20,Andorra,42.5063,1.5218,77265 +24,AO,AGO,24,Angola,-11.2027,17.8739,32866268 +28,AG,ATG,28,Antigua and Barbuda,17.0608,-61.7964,97928 +32,AR,ARG,32,Argentina,-38.4161,-63.6167,45195777 
+51,AM,ARM,51,Armenia,40.0691,45.0382,2963234 +40,AT,AUT,40,Austria,47.5162,14.5501,9006400 +31,AZ,AZE,31,Azerbaijan,40.1431,47.5769,10139175 +44,BS,BHS,44,Bahamas,25.025885,-78.035889,393248 +48,BH,BHR,48,Bahrain,26.0275,50.55,1701583 +50,BD,BGD,50,Bangladesh,23.685,90.3563,164689383 +52,BB,BRB,52,Barbados,13.1939,-59.5432,287371 +112,BY,BLR,112,Belarus,53.7098,27.9534,9449321 +56,BE,BEL,56,Belgium,50.8333,4.469936,11492641 +84,BZ,BLZ,84,Belize,17.1899,-88.4976,397621 +204,BJ,BEN,204,Benin,9.3077,2.3158,12123198 +64,BT,BTN,64,Bhutan,27.5142,90.4336,771612 +68,BO,BOL,68,Bolivia,-16.2902,-63.5887,11673029 +70,BA,BIH,70,Bosnia and Herzegovina,43.9159,17.6791,3280815 +72,BW,BWA,72,Botswana,-22.3285,24.6849,2351625 +76,BR,BRA,76,Brazil,-14.235,-51.9253,212559409 +96,BN,BRN,96,Brunei,4.5353,114.7277,437483 +100,BG,BGR,100,Bulgaria,42.7339,25.4858,6948445 +854,BF,BFA,854,Burkina Faso,12.2383,-1.5616,20903278 +104,MM,MMR,104,Burma,21.9162,95.956,54409794 +108,BI,BDI,108,Burundi,-3.3731,29.9189,11890781 +132,CV,CPV,132,Cabo Verde,16.5388,-23.0418,555988 +116,KH,KHM,116,Cambodia,11.55,104.9167,16718971 +120,CM,CMR,120,Cameroon,3.848,11.5021,26545864 +140,CF,CAF,140,Central African Republic,6.6111,20.9394,4829764 +148,TD,TCD,148,Chad,15.4542,18.7322,16425859 +152,CL,CHL,152,Chile,-35.6751,-71.543,19116209 +170,CO,COL,170,Colombia,4.5709,-74.2973,50882884 +178,CG,COG,178,Congo (Brazzaville),-0.228,15.8277,5518092 +180,CD,COD,180,Congo (Kinshasa),-4.0383,21.7587,89561404 +174,KM,COM,174,Comoros,-11.6455,43.3333,869595 +188,CR,CRI,188,Costa Rica,9.7489,-83.7534,5094114 +384,CI,CIV,384,Cote d'Ivoire,7.54,-5.5471,26378275 +191,HR,HRV,191,Croatia,45.1,15.2,4105268 +192,CU,CUB,192,Cuba,21.521757,-77.781167,11326616 +196,CY,CYP,196,Cyprus,35.1264,33.4299,1207361 +203,CZ,CZE,203,Czechia,49.8175,15.473,10708982 +208,DK,DNK,208,Denmark,56.2639,9.5018,5837213 +262,DJ,DJI,262,Djibouti,11.8251,42.5903,988002 +212,DM,DMA,212,Dominica,15.415,-61.371,71991 +214,DO,DOM,214,Dominican 
Republic,18.7357,-70.1627,10847904 +218,EC,ECU,218,Ecuador,-1.8312,-78.1834,17643060 +818,EG,EGY,818,Egypt,26.820553,30.802498,102334403 +222,SV,SLV,222,El Salvador,13.7942,-88.8965,6486201 +226,GQ,GNQ,226,Equatorial Guinea,1.6508,10.2679,1402985 +232,ER,ERI,232,Eritrea,15.1794,39.7823,3546427 +233,EE,EST,233,Estonia,58.5953,25.0136,1326539 +748,SZ,SWZ,748,Eswatini,-26.5225,31.4659,1160164 +231,ET,ETH,231,Ethiopia,9.145,40.4897,114963583 +242,FJ,FJI,242,Fiji,-17.7134,178.065,896444 +246,FI,FIN,246,Finland,61.92411,25.748151,5540718 +250,FR,FRA,250,France,46.2276,2.2137,65249843 +266,GA,GAB,266,Gabon,-0.8037,11.6094,2225728 +270,GM,GMB,270,Gambia,13.4432,-15.3101,2416664 +268,GE,GEO,268,Georgia,42.3154,43.3569,3989175 +276,DE,DEU,276,Germany,51.165691,10.451526,83155031 +288,GH,GHA,288,Ghana,7.9465,-1.0232,31072945 +300,GR,GRC,300,Greece,39.0742,21.8243,10423056 +308,GD,GRD,308,Grenada,12.1165,-61.679,112519 +320,GT,GTM,320,Guatemala,15.7835,-90.2308,17915567 +324,GN,GIN,324,Guinea,9.9456,-9.6966,13132792 +624,GW,GNB,624,Guinea-Bissau,11.8037,-15.1804,1967998 +328,GY,GUY,328,Guyana,4.860416,-58.93018,786559 +332,HT,HTI,332,Haiti,18.9712,-72.2852,11402533 +336,VA,VAT,336,Holy See,41.9029,12.4534,809 +340,HN,HND,340,Honduras,15.2,-86.2419,9904608 +348,HU,HUN,348,Hungary,47.1625,19.5033,9660350 +352,IS,ISL,352,Iceland,64.9631,-19.0208,341250 +356,IN,IND,356,India,20.593684,78.96288,1380004385 +360,ID,IDN,360,Indonesia,-0.7893,113.9213,273523621 +364,IR,IRN,364,Iran,32.427908,53.688046,83992953 +368,IQ,IRQ,368,Iraq,33.223191,43.679291,40222503 +372,IE,IRL,372,Ireland,53.1424,-7.6921,4937796 +376,IL,ISR,376,Israel,31.046051,34.851612,8655541 +380,IT,ITA,380,Italy,41.87194,12.56738,60461828 +388,JM,JAM,388,Jamaica,18.1096,-77.2975,2961161 +392,JP,JPN,392,Japan,36.204824,138.252924,126476458 +400,JO,JOR,400,Jordan,31.24,36.51,10203140 +398,KZ,KAZ,398,Kazakhstan,48.0196,66.9237,18776707 +404,KE,KEN,404,Kenya,-0.0236,37.9062,53771300 
+296,KI,KIR,296,Kiribati,-3.3704,-168.734,117606 +408,KP,PRK,408,"Korea, North",40.3399,127.5101,25778815 +410,KR,KOR,410,"Korea, South",35.907757,127.766922,51269183 +383,XK,XKS,383,Kosovo,42.602636,20.902977,1810366 +414,KW,KWT,414,Kuwait,29.31166,47.481766,4270563 +417,KG,KGZ,417,Kyrgyzstan,41.20438,74.766098,6524191 +418,LA,LAO,418,Laos,19.85627,102.495496,7275556 +428,LV,LVA,428,Latvia,56.8796,24.6032,1886202 +422,LB,LBN,422,Lebanon,33.8547,35.8623,6825442 +426,LS,LSO,426,Lesotho,-29.61,28.2336,2142252 +430,LR,LBR,430,Liberia,6.428055,-9.429499,5057677 +434,LY,LBY,434,Libya,26.3351,17.228331,6871287 +438,LI,LIE,438,Liechtenstein,47.14,9.55,38137 +440,LT,LTU,440,Lithuania,55.1694,23.8813,2722291 +442,LU,LUX,442,Luxembourg,49.8153,6.1296,625976 +450,MG,MDG,450,Madagascar,-18.766947,46.869107,27691019 +454,MW,MWI,454,Malawi,-13.2543,34.3015,19129955 +458,MY,MYS,458,Malaysia,4.210484,101.975766,32365998 +462,MV,MDV,462,Maldives,3.2028,73.2207,540542 +466,ML,MLI,466,Mali,17.570692,-3.996166,20250834 +470,MT,MLT,470,Malta,35.9375,14.3754,441539 +584,MH,MHL,584,Marshall Islands,7.1315,171.1845,58413 +478,MR,MRT,478,Mauritania,21.0079,-10.9408,4649660 +480,MU,MUS,480,Mauritius,-20.348404,57.552152,1271767 +484,MX,MEX,484,Mexico,23.6345,-102.5528,127792286 +583,FM,FSM,583,Micronesia,7.4256,150.5508,113815 +498,MD,MDA,498,Moldova,47.4116,28.3699,4027690 +492,MC,MCO,492,Monaco,43.7333,7.4167,39244 +496,MN,MNG,496,Mongolia,46.8625,103.8467,3278292 +499,ME,MNE,499,Montenegro,42.708678,19.37439,628062 +504,MA,MAR,504,Morocco,31.7917,-7.0926,36910558 +508,MZ,MOZ,508,Mozambique,-18.665695,35.529562,31255435 +516,NA,NAM,516,Namibia,-22.9576,18.4904,2540916 +520,NR,NRU,520,Nauru,-0.5228,166.9315,10834 +524,NP,NPL,524,Nepal,28.1667,84.25,29136808 +528,NL,NLD,528,Netherlands,52.1326,5.2913,17134873 +554,NZ,NZL,554,New Zealand,-40.9006,174.886,4822233 +558,NI,NIC,558,Nicaragua,12.865416,-85.207229,6624554 +562,NE,NER,562,Niger,17.607789,8.081666,24206636 
+566,NG,NGA,566,Nigeria,9.082,8.6753,206139587 +807,MK,MKD,807,North Macedonia,41.6086,21.7453,2083380 +578,NO,NOR,578,Norway,60.472,8.4689,5421242 +512,OM,OMN,512,Oman,21.512583,55.923255,5106622 +586,PK,PAK,586,Pakistan,30.3753,69.3451,220892331 +585,PW,PLW,8,Palau,7.515,134.5825,18008 +591,PA,PAN,591,Panama,8.538,-80.7821,4314768 +598,PG,PNG,598,Papua New Guinea,-6.314993,143.95555,8947027 +600,PY,PRY,600,Paraguay,-23.4425,-58.4438,7132530 +604,PE,PER,604,Peru,-9.19,-75.0152,32971846 +608,PH,PHL,608,Philippines,12.879721,121.774017,109581085 +616,PL,POL,616,Poland,51.9194,19.1451,37846605 +620,PT,PRT,620,Portugal,39.3999,-8.2245,10196707 +634,QA,QAT,634,Qatar,25.3548,51.1839,2881060 +642,RO,ROU,642,Romania,45.9432,24.9668,19237682 +643,RU,RUS,643,Russia,61.52401,105.318756,145934460 +646,RW,RWA,646,Rwanda,-1.9403,29.8739,12952209 +659,KN,KNA,659,Saint Kitts and Nevis,17.357822,-62.782998,53192 +662,LC,LCA,662,Saint Lucia,13.9094,-60.9789,183629 +670,VC,VCT,670,Saint Vincent and the Grenadines,12.9843,-61.2872,110947 +882,WS,WSM,882,Samoa,-13.759,-172.1046,196130 +674,SM,SMR,674,San Marino,43.9424,12.4578,33938 +678,ST,STP,678,Sao Tome and Principe,0.1864,6.6131,219161 +682,SA,SAU,682,Saudi Arabia,23.885942,45.079162,34813867 +686,SN,SEN,686,Senegal,14.4974,-14.4524,16743930 +688,RS,SRB,688,Serbia,44.0165,21.0059,8737370 +690,SC,SYC,690,Seychelles,-4.6796,55.492,98340 +694,SL,SLE,694,Sierra Leone,8.460555,-11.779889,7976985 +702,SG,SGP,702,Singapore,1.2833,103.8333,5850343 +703,SK,SVK,703,Slovakia,48.669,19.699,5434712 +705,SI,SVN,705,Slovenia,46.1512,14.9955,2078932 +90,SB,SLB,90,Solomon Islands,-9.6457,160.1562,652858 +706,SO,SOM,706,Somalia,5.152149,46.199616,15893219 +710,ZA,ZAF,710,South Africa,-30.5595,22.9375,59308690 +728,SS,SSD,728,South Sudan,6.877,31.307,11193729 +724,ES,ESP,724,Spain,40.463667,-3.74922,46754783 +144,LK,LKA,144,Sri Lanka,7.873054,80.771797,21413250 +729,SD,SDN,729,Sudan,12.8628,30.2176,43849269 
+740,SR,SUR,740,Suriname,3.9193,-56.0278,586634 +752,SE,SWE,752,Sweden,60.128161,18.643501,10099270 +756,CH,CHE,756,Switzerland,46.8182,8.2275,8654618 +760,SY,SYR,760,Syria,34.802075,38.996815,17500657 +158,TW,TWN,158,Taiwan*,23.7,121,23816775 +762,TJ,TJK,762,Tajikistan,38.861,71.2761,9537642 +834,TZ,TZA,834,Tanzania,-6.369028,34.888822,59734213 +764,TH,THA,764,Thailand,15.870032,100.992541,69799978 +626,TL,TLS,626,Timor-Leste,-8.874217,125.727539,1318442 +768,TG,TGO,768,Togo,8.6195,0.8248,8278737 +776,TO,TON,776,Tonga,-21.179,-175.1982,105697 +780,TT,TTO,780,Trinidad and Tobago,10.6918,-61.2225,1399491 +788,TN,TUN,788,Tunisia,33.886917,9.537499,11818618 +792,TR,TUR,792,Turkey,38.9637,35.2433,84339067 +798,TV,TUV,798,Tuvalu,-7.1095,177.6493,11792 +800,UG,UGA,800,Uganda,1.373333,32.290275,45741000 +804,UA,UKR,804,Ukraine,48.3794,31.1656,43733759 +784,AE,ARE,784,United Arab Emirates,23.424076,53.847818,9890400 +826,GB,GBR,826,United Kingdom,55.3781,-3.436,67886004 +858,UY,URY,858,Uruguay,-32.5228,-55.7658,3473727 +860,UZ,UZB,860,Uzbekistan,41.377491,64.585262,33469199 +548,VU,VUT,548,Vanuatu,-15.3767,166.9592,292680 +862,VE,VEN,862,Venezuela,6.4238,-66.5897,28435943 +704,VN,VNM,704,Vietnam,14.058324,108.277199,97338583 +275,PS,PSE,275,West Bank and Gaza,31.9522,35.2332,5101416 +732,EH,ESH,732,Western Sahara,24.2155,-12.8858,597330 +887,YE,YEM,887,Yemen,15.552727,48.516388,29825968 +894,ZM,ZMB,894,Zambia,-13.133897,27.849332,18383956 +716,ZW,ZWE,716,Zimbabwe,-19.015438,29.154857,14862927 +36,AU,AUS,36,Australia,-25,133,25459700 +124,CA,CAN,124,Canada,60,-95,38246108 +156,CN,CHN,156,China,35.8617,104.19545,1411778724 +840,US,USA,840,US,40,-100,329466283 diff --git a/python/semantic_kernel/__init__.py b/python/semantic_kernel/__init__.py index 8499f48aba31..297fcfd04f05 100644 --- a/python/semantic_kernel/__init__.py +++ b/python/semantic_kernel/__init__.py @@ -2,4 +2,5 @@ from semantic_kernel.kernel import Kernel -__all__ = ["Kernel"] +__version__ = "1.11.0" +__all__ = 
["Kernel", "__version__"] diff --git a/python/semantic_kernel/agents/channels/agent_channel.py b/python/semantic_kernel/agents/channels/agent_channel.py index 8d8c0342dfd3..b7a56d1f4a32 100644 --- a/python/semantic_kernel/agents/channels/agent_channel.py +++ b/python/semantic_kernel/agents/channels/agent_channel.py @@ -43,7 +43,24 @@ def invoke( agent: The agent to interact with. Returns: - A async iterable of a bool, ChatMessageContent. + An async iterable of a bool, ChatMessageContent. + """ + ... + + @abstractmethod + def invoke_stream( + self, + agent: "Agent", + history: "list[ChatMessageContent]", + ) -> AsyncIterable["ChatMessageContent"]: + """Perform a discrete incremental stream interaction between a single Agent and AgentChat. + + Args: + agent: The agent to interact with. + history: The history of messages in the conversation. + + Returns: + An async iterable ChatMessageContent. """ ... diff --git a/python/semantic_kernel/agents/channels/chat_history_channel.py b/python/semantic_kernel/agents/channels/chat_history_channel.py index 7b170a60ca41..563efeaef610 100644 --- a/python/semantic_kernel/agents/channels/chat_history_channel.py +++ b/python/semantic_kernel/agents/channels/chat_history_channel.py @@ -23,8 +23,6 @@ if TYPE_CHECKING: from semantic_kernel.agents.agent import Agent from semantic_kernel.contents.chat_history import ChatHistory - from semantic_kernel.contents.chat_message_content import ChatMessageContent - from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent @experimental_class @@ -38,7 +36,7 @@ def invoke(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: ... @abstractmethod - def invoke_stream(self, history: "ChatHistory") -> AsyncIterable["StreamingChatMessageContent"]: + def invoke_stream(self, history: "ChatHistory") -> AsyncIterable["ChatMessageContent"]: """Invoke the chat history agent protocol in streaming mode.""" ... 
@@ -100,6 +98,36 @@ async def invoke( yield_message, ) + @override + async def invoke_stream( + self, + agent: "Agent", + messages: list[ChatMessageContent], + ) -> AsyncIterable[ChatMessageContent]: + """Perform a discrete incremental stream interaction between a single Agent and AgentChat. + + Args: + agent: The agent to interact with. + messages: The history of messages in the conversation. + + Returns: + An async iterable of bool, StreamingChatMessageContent. + """ + if not isinstance(agent, ChatHistoryAgentProtocol): + id = getattr(agent, "id", "") + raise ServiceInvalidTypeError( + f"Invalid channel binding for agent with id: `{id}` with name: ({type(agent).__name__})" + ) + + message_count = len(self.messages) + + async for response_message in agent.invoke_stream(self): + if response_message.content: + yield response_message + + for message_index in range(message_count, len(self.messages)): + messages.append(self.messages[message_index]) + def _is_message_visible(self, message: ChatMessageContent, message_queue_count: int) -> bool: """Determine if a message is visible to the user.""" return ( diff --git a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py index 1d37b68dcc5b..7ba31b598827 100644 --- a/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py +++ b/python/semantic_kernel/agents/channels/open_ai_assistant_channel.py @@ -14,12 +14,15 @@ from semantic_kernel.agents.channels.agent_channel import AgentChannel from semantic_kernel.agents.open_ai.assistant_content_generation import create_chat_message, generate_message_content from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.exceptions.agent_exceptions import AgentChatException +from semantic_kernel.utils.experimental_decorator import experimental_class if TYPE_CHECKING: from 
semantic_kernel.agents.agent import Agent +@experimental_class class OpenAIAssistantChannel(AgentChannel): """OpenAI Assistant Channel.""" @@ -36,6 +39,8 @@ async def receive(self, history: list["ChatMessageContent"]) -> None: history: The conversation messages. """ for message in history: + if any(isinstance(item, FunctionCallContent) for item in message.items): + continue await create_chat_message(self.client, self.thread_id, message) @override @@ -59,6 +64,30 @@ async def invoke(self, agent: "Agent") -> AsyncIterable[tuple[bool, "ChatMessage async for is_visible, message in agent._invoke_internal(thread_id=self.thread_id): yield is_visible, message + @override + async def invoke_stream( + self, agent: "Agent", messages: list[ChatMessageContent] + ) -> AsyncIterable["ChatMessageContent"]: + """Invoke the agent stream. + + Args: + agent: The agent to invoke. + messages: The conversation messages. + + Yields: + tuple[bool, StreamingChatMessageContent]: The conversation messages. + """ + from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase + + if not isinstance(agent, OpenAIAssistantBase): + raise AgentChatException(f"Agent is not of the expected type {type(OpenAIAssistantBase)}.") + + if agent._is_deleted: + raise AgentChatException("Agent is deleted.") + + async for message in agent._invoke_internal_stream(thread_id=self.thread_id, messages=messages): + yield message + @override async def get_history(self) -> AsyncIterable["ChatMessageContent"]: """Get the conversation history. 
diff --git a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py index 6e07bd06d899..fede96bccd48 100644 --- a/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py +++ b/python/semantic_kernel/agents/chat_completion/chat_completion_agent.py @@ -174,9 +174,13 @@ async def invoke_stream(self, history: ChatHistory) -> AsyncIterable[StreamingCh f"with message count: {message_count}." ) + role = None + message_builder: list[str] = [] async for message_list in messages: for message in message_list: + role = message.role message.name = self.name + message_builder.append(message.content) yield message # Capture mutated messages related function calling / tools @@ -185,6 +189,13 @@ async def invoke_stream(self, history: ChatHistory) -> AsyncIterable[StreamingCh message.name = self.name history.add_message(message) + if role != AuthorRole.TOOL: + history.add_message( + ChatMessageContent( + role=role if role else AuthorRole.ASSISTANT, content="".join(message_builder), name=self.name + ) + ) + def _setup_agent_chat_history(self, history: ChatHistory) -> ChatHistory: """Setup the agent chat history.""" chat = [] diff --git a/python/semantic_kernel/agents/group_chat/agent_chat.py b/python/semantic_kernel/agents/group_chat/agent_chat.py index dcd44b2bed6b..294f695cbb1b 100644 --- a/python/semantic_kernel/agents/group_chat/agent_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_chat.py @@ -3,9 +3,7 @@ import asyncio import logging import threading -from abc import abstractmethod from collections.abc import AsyncGenerator, AsyncIterable -from typing import Protocol, runtime_checkable from pydantic import Field, PrivateAttr @@ -23,17 +21,6 @@ logger: logging.Logger = logging.getLogger(__name__) -@experimental_class -@runtime_checkable -class AgentChatProtocol(Protocol): - """A protocol for agent chat.""" - - @abstractmethod - async def invoke(self, agent: Agent) 
-> AsyncIterable[ChatMessageContent]: - """Invoke an agent asynchronously.""" - ... - - @experimental_class class AgentChat(KernelBaseModel): """A base class chat interface for agents.""" @@ -162,7 +149,32 @@ async def invoke_agent(self, agent: Agent) -> AsyncIterable[ChatMessageContent]: # Broadcast message to other channels (in parallel) # Note: Able to queue messages without synchronizing channels. - channel_refs = [ChannelReference(channel=channel, hash=key) for key, channel in self.agent_channels.items()] + channel_refs = [ + ChannelReference(channel=ch, hash=key) for key, ch in self.agent_channels.items() if ch != channel + ] + await self.broadcast_queue.enqueue(channel_refs, messages) + finally: + self.clear_activity_signal() + + async def invoke_agent_stream(self, agent: Agent) -> AsyncIterable[ChatMessageContent]: + """Invoke an agent stream asynchronously.""" + self.set_activity_or_throw() + logger.info(f"Invoking agent {agent.name}") + try: + channel: AgentChannel = await self._get_or_create_channel(agent) + messages: list[ChatMessageContent] = [] + + async for message in channel.invoke_stream(agent, messages): + yield message + + for message in messages: + self.history.messages.append(message) + + # Broadcast message to other channels (in parallel) + # Note: Able to queue messages without synchronizing channels. 
+ channel_refs = [ + ChannelReference(channel=ch, hash=key) for key, ch in self.agent_channels.items() if ch != channel + ] await self.broadcast_queue.enqueue(channel_refs, messages) finally: self.clear_activity_signal() diff --git a/python/semantic_kernel/agents/group_chat/agent_group_chat.py b/python/semantic_kernel/agents/group_chat/agent_group_chat.py index 6aa60242de52..38d0d73af0ab 100644 --- a/python/semantic_kernel/agents/group_chat/agent_group_chat.py +++ b/python/semantic_kernel/agents/group_chat/agent_group_chat.py @@ -16,10 +16,12 @@ from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException +from semantic_kernel.utils.experimental_decorator import experimental_class logger: logging.Logger = logging.getLogger(__name__) +@experimental_class class AgentGroupChat(AgentChat): """An agent chat that supports multi-turn interactions.""" @@ -85,6 +87,20 @@ async def invoke_single_turn(self, agent: Agent) -> AsyncIterable[ChatMessageCon self.is_complete = await task yield message + async def invoke_stream_single_turn(self, agent: Agent) -> AsyncIterable[ChatMessageContent]: + """Invoke the agent chat for a single turn. + + Args: + agent: The agent to invoke. + + Yields: + The chat message. + """ + async for message in self.invoke_stream(agent, is_joining=True): + yield message + + self.is_complete = await self.termination_strategy.should_terminate(agent, self.history.messages) + async def invoke(self, agent: Agent | None = None, is_joining: bool = True) -> AsyncIterable[ChatMessageContent]: """Invoke the agent chat asynchronously. 
@@ -109,7 +125,7 @@ async def invoke(self, agent: Agent | None = None, is_joining: bool = True) -> A return - if self.agents is None: + if not self.agents: raise AgentChatException("No agents are available") if self.is_complete: @@ -133,3 +149,53 @@ async def invoke(self, agent: Agent | None = None, is_joining: bool = True) -> A if self.is_complete: break + + async def invoke_stream( + self, agent: Agent | None = None, is_joining: bool = True + ) -> AsyncIterable[ChatMessageContent]: + """Invoke the agent chat stream asynchronously. + + Handles both group interactions and single agent interactions based on the provided arguments. + + Args: + agent: The agent to invoke. If not provided, the method processes all agents in the chat. + is_joining: Controls whether the agent joins the chat. Defaults to True. + + Yields: + The chat message. + """ + if agent is not None: + if is_joining: + self.add_agent(agent) + + async for message in super().invoke_agent_stream(agent): + if message.role == AuthorRole.ASSISTANT: + task = self.termination_strategy.should_terminate(agent, self.history.messages) + self.is_complete = await task + yield message + + return + + if not self.agents: + raise AgentChatException("No agents are available") + + if self.is_complete: + if not self.termination_strategy.automatic_reset: + raise AgentChatException("Chat is already complete") + + self.is_complete = False + + for _ in range(self.termination_strategy.maximum_iterations): + try: + selected_agent = await self.selection_strategy.next(self.agents, self.history.messages) + except Exception as ex: + logger.error(f"Failed to select agent: {ex}") + raise AgentChatException("Failed to select agent") from ex + + async for message in super().invoke_agent_stream(selected_agent): + yield message + + self.is_complete = await self.termination_strategy.should_terminate(selected_agent, self.history.messages) + + if self.is_complete: + break diff --git 
a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py index 858c17817ae0..5c75cc6d9e3e 100644 --- a/python/semantic_kernel/agents/open_ai/assistant_content_generation.py +++ b/python/semantic_kernel/agents/open_ai/assistant_content_generation.py @@ -3,8 +3,14 @@ from typing import TYPE_CHECKING, Any from openai import AsyncOpenAI +from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation +from openai.types.beta.threads.file_path_delta_annotation import FilePathDeltaAnnotation from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock +from openai.types.beta.threads.image_file_delta_block import ImageFileDeltaBlock +from openai.types.beta.threads.message_delta_event import MessageDeltaEvent +from openai.types.beta.threads.runs.code_interpreter_tool_call import CodeInterpreter from openai.types.beta.threads.text_content_block import TextContentBlock +from openai.types.beta.threads.text_delta_block import TextDeltaBlock from semantic_kernel.contents.annotation_content import AnnotationContent from semantic_kernel.contents.chat_message_content import ChatMessageContent @@ -12,15 +18,22 @@ from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from 
semantic_kernel.exceptions.agent_exceptions import AgentExecutionException +from semantic_kernel.utils.experimental_decorator import experimental_function if TYPE_CHECKING: from openai.resources.beta.threads.messages import Message from openai.resources.beta.threads.runs.runs import Run from openai.types.beta.threads.annotation import Annotation + from openai.types.beta.threads.runs import RunStep from openai.types.beta.threads.runs.tool_call import ToolCall + from openai.types.beta.threads.runs.tool_calls_step_details import ToolCallsStepDetails ################################################################### @@ -30,6 +43,7 @@ ################################################################### +@experimental_function async def create_chat_message( client: AsyncOpenAI, thread_id: str, @@ -47,7 +61,7 @@ async def create_chat_message( Returns: Message: The message. """ - if message.role.value not in allowed_message_roles: + if message.role.value not in allowed_message_roles and message.role != AuthorRole.TOOL: raise AgentExecutionException( f"Invalid message role `{message.role.value}`. Allowed roles are {allowed_message_roles}." ) @@ -56,11 +70,12 @@ async def create_chat_message( return await client.beta.threads.messages.create( thread_id=thread_id, - role=message.role.value, # type: ignore + role="assistant" if message.role == AuthorRole.TOOL else message.role.value, # type: ignore content=message_contents, # type: ignore ) +@experimental_function def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: """Get the message contents. 
@@ -78,14 +93,33 @@ def get_message_contents(message: "ChatMessageContent") -> list[dict[str, Any]]: "type": "image_file", "image_file": {"file_id": content.file_id}, }) + elif isinstance(content, FunctionResultContent): + contents.append({"type": "text", "text": content.result}) return contents -def generate_message_content(assistant_name: str, message: "Message") -> ChatMessageContent: +@experimental_function +def generate_message_content( + assistant_name: str, message: "Message", completed_step: "RunStep | None" = None +) -> ChatMessageContent: """Generate message content.""" role = AuthorRole(message.role) - content: ChatMessageContent = ChatMessageContent(role=role, name=assistant_name) # type: ignore + metadata = ( + { + "created_at": completed_step.created_at, + "message_id": message.id, # message needs to be defined in context + "step_id": completed_step.id, + "run_id": completed_step.run_id, + "thread_id": completed_step.thread_id, + "assistant_id": completed_step.assistant_id, + "usage": completed_step.usage, + } + if completed_step is not None + else None + ) + + content: ChatMessageContent = ChatMessageContent(role=role, name=assistant_name, metadata=metadata) # type: ignore for item_content in message.content: if item_content.type == "text": @@ -107,6 +141,49 @@ def generate_message_content(assistant_name: str, message: "Message") -> ChatMes return content +@experimental_function +def generate_streaming_message_content( + assistant_name: str, message_delta_event: "MessageDeltaEvent" +) -> StreamingChatMessageContent: + """Generate streaming message content from a MessageDeltaEvent.""" + delta = message_delta_event.delta + + # Determine the role + role = AuthorRole(delta.role) if delta.role is not None else AuthorRole("assistant") + + items: list[StreamingTextContent | StreamingAnnotationContent | StreamingFileReferenceContent] = [] + + # Process each content block in the delta + for delta_block in delta.content or []: + if delta_block.type == "text": 
+ assert isinstance(delta_block, TextDeltaBlock) # nosec + if delta_block.text and delta_block.text.value: # Ensure text is not None + text_value = delta_block.text.value + items.append( + StreamingTextContent( + text=text_value, + choice_index=delta_block.index, + ) + ) + # Process annotations if any + if delta_block.text.annotations: + for annotation in delta_block.text.annotations or []: + if isinstance(annotation, (FileCitationDeltaAnnotation, FilePathDeltaAnnotation)): + items.append(generate_streaming_annotation_content(annotation)) + elif delta_block.type == "image_file": + assert isinstance(delta_block, ImageFileDeltaBlock) # nosec + if delta_block.image_file and delta_block.image_file.file_id: + file_id = delta_block.image_file.file_id + items.append( + StreamingFileReferenceContent( + file_id=file_id, + ) + ) + + return StreamingChatMessageContent(role=role, name=assistant_name, items=items, choice_index=0) # type: ignore + + +@experimental_function def generate_function_call_content(agent_name: str, fccs: list[FunctionCallContent]) -> ChatMessageContent: """Generate function call content. @@ -120,6 +197,7 @@ def generate_function_call_content(agent_name: str, fccs: list[FunctionCallConte return ChatMessageContent(role=AuthorRole.TOOL, name=agent_name, items=fccs) # type: ignore +@experimental_function def generate_function_result_content( agent_name: str, function_step: FunctionCallContent, tool_call: "ToolCall" ) -> ChatMessageContent: @@ -136,6 +214,7 @@ def generate_function_result_content( return function_call_content +@experimental_function def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCallContent]) -> list[FunctionCallContent]: """Extract function call contents from the run. 
@@ -162,6 +241,7 @@ def get_function_call_contents(run: "Run", function_steps: dict[str, FunctionCal return function_call_contents +@experimental_function def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessageContent": """Generate code interpreter content. @@ -180,6 +260,57 @@ def generate_code_interpreter_content(agent_name: str, code: str) -> "ChatMessag ) +@experimental_function +def generate_streaming_tools_content( + agent_name: str, step_details: "ToolCallsStepDetails" +) -> "StreamingChatMessageContent | None": + """Generate code interpreter content. + + Args: + agent_name: The agent name. + step_details: The current step details. + + Returns: + StreamingChatMessageContent: The chat message content. + """ + items: list[StreamingTextContent | StreamingFileReferenceContent] = [] + + metadata: dict[str, bool] = {} + for index, tool in enumerate(step_details.tool_calls): + if tool.type != "code_interpreter": + continue + if tool.code_interpreter.input: + items.append( + StreamingTextContent( + choice_index=index, + text=tool.code_interpreter.input, + ) + ) + metadata["code"] = True + if len(tool.code_interpreter.outputs) > 0: + for output in tool.code_interpreter.outputs: + assert isinstance(output, CodeInterpreter) # nosec + if output.image.file_id: + items.append( + StreamingFileReferenceContent( + file_id=output.image.file_id, + ) + ) + + return ( + StreamingChatMessageContent( + role=AuthorRole.TOOL, + name=agent_name, + items=items, # type: ignore + choice_index=0, + metadata=metadata if metadata else None, + ) + if len(items) > 0 + else None + ) + + +@experimental_function def generate_annotation_content(annotation: "Annotation") -> AnnotationContent: """Generate annotation content.""" file_id = None @@ -194,3 +325,20 @@ def generate_annotation_content(annotation: "Annotation") -> AnnotationContent: start_index=annotation.start_index, end_index=annotation.end_index, ) + + +@experimental_function +def 
generate_streaming_annotation_content(annotation: "Annotation") -> StreamingAnnotationContent: + """Generate streaming annotation content.""" + file_id = None + if hasattr(annotation, "file_path") and annotation.file_path: + file_id = annotation.file_path.file_id if annotation.file_path.file_id else None + elif hasattr(annotation, "file_citation") and annotation.file_citation: + file_id = annotation.file_citation.file_id if annotation.file_citation.file_id else None + + return StreamingAnnotationContent( + file_id=file_id, + quote=annotation.text, + start_index=annotation.start_index, + end_index=annotation.end_index, + ) diff --git a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py index fe6e48bd0356..e9f744427810 100644 --- a/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/azure_assistant_agent.py @@ -62,6 +62,7 @@ def __init__( max_prompt_tokens: int | None = None, parallel_tool_calls_enabled: bool | None = True, truncation_message_count: int | None = None, + token_endpoint: str | None = None, **kwargs: Any, ) -> None: """Initialize an Azure OpenAI Assistant Agent. @@ -95,6 +96,7 @@ def __init__( max_prompt_tokens: The maximum prompt tokens. (optional) parallel_tool_calls_enabled: Enable parallel tool calls. (optional) truncation_message_count: The truncation message count. (optional) + token_endpoint: The Azure AD token endpoint. (optional) **kwargs: Additional keyword arguments. 
Raises: @@ -107,22 +109,35 @@ def __init__( api_version=api_version, env_file_path=env_file_path, env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, ) if not azure_openai_settings.chat_deployment_name: raise AgentInitializationException("The Azure OpenAI chat_deployment_name is required.") - if not azure_openai_settings.api_key and not ad_token and not ad_token_provider: + if ( + client is None + and azure_openai_settings.api_key is None + and ad_token_provider is None + and ad_token is None + and azure_openai_settings.token_endpoint + ): + ad_token = azure_openai_settings.get_azure_openai_auth_token( + token_endpoint=azure_openai_settings.token_endpoint + ) + + if not client and not azure_openai_settings.api_key and not ad_token and not ad_token_provider: raise AgentInitializationException("Please provide either api_key, ad_token or ad_token_provider.") - client = self._create_client( - api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, - endpoint=azure_openai_settings.endpoint, - api_version=azure_openai_settings.api_version, - ad_token=ad_token, - ad_token_provider=ad_token_provider, - default_headers=default_headers, - ) + if not client: + client = self._create_client( + api_key=azure_openai_settings.api_key.get_secret_value() if azure_openai_settings.api_key else None, + endpoint=azure_openai_settings.endpoint, + api_version=azure_openai_settings.api_version, + ad_token=ad_token, + ad_token_provider=ad_token_provider, + default_headers=default_headers, + ) service_id = service_id if service_id else DEFAULT_SERVICE_NAME args: dict[str, Any] = { @@ -343,6 +358,7 @@ def _create_azure_openai_settings( api_version: str | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, + token_endpoint: str | None = None, ) -> AzureOpenAISettings: """Create the Azure OpenAI settings. 
@@ -353,6 +369,7 @@ def _create_azure_openai_settings( api_version: The Azure OpenAI API version. env_file_path: The environment file path. env_file_encoding: The environment file encoding. + token_endpoint: The Azure AD token endpoint. Returns: An instance of the AzureOpenAISettings. @@ -365,6 +382,7 @@ def _create_azure_openai_settings( api_version=api_version, env_file_path=env_file_path, env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, ) except ValidationError as ex: raise AgentInitializationException("Failed to create Azure OpenAI settings.", ex) from ex @@ -379,7 +397,7 @@ async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: """ assistants = await self.client.beta.assistants.list(order="desc") for assistant in assistants.data: - yield self._create_open_ai_assistant_definition(assistant) + yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) @classmethod async def retrieve( @@ -439,6 +457,6 @@ async def retrieve( ) assistant = await client.beta.assistants.retrieve(id) assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - return AzureAssistantAgent(kernel=kernel, **assistant_definition) + return AzureAssistantAgent(kernel=kernel, assistant=assistant, **assistant_definition) # endregion diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py index ad68fface2aa..0c20f67548e9 100644 --- a/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py +++ b/python/semantic_kernel/agents/open_ai/open_ai_assistant_agent.py @@ -346,7 +346,7 @@ async def list_definitions(self) -> AsyncIterable[dict[str, Any]]: """ assistants = await self.client.beta.assistants.list(order="desc") for assistant in assistants.data: - yield self._create_open_ai_assistant_definition(assistant) + yield OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) @classmethod async def retrieve( @@ 
-397,6 +397,6 @@ async def retrieve( ) assistant = await client.beta.assistants.retrieve(id) assistant_definition = OpenAIAssistantBase._create_open_ai_assistant_definition(assistant) - return OpenAIAssistantAgent(kernel=kernel, **assistant_definition) + return OpenAIAssistantAgent(kernel=kernel, assistant=assistant, **assistant_definition) # endregion diff --git a/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py b/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py index 70df5943840e..2ac1ba91c498 100644 --- a/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py +++ b/python/semantic_kernel/agents/open_ai/open_ai_assistant_base.py @@ -4,6 +4,7 @@ import json import logging from collections.abc import AsyncIterable, Iterable +from dataclasses import dataclass from typing import TYPE_CHECKING, Any, ClassVar, Literal from openai import AsyncOpenAI @@ -23,11 +24,16 @@ generate_function_call_content, generate_function_result_content, generate_message_content, + generate_streaming_message_content, + generate_streaming_tools_content, get_function_call_contents, get_message_contents, ) from semantic_kernel.agents.open_ai.run_polling_options import RunPollingOptions -from semantic_kernel.connectors.ai.function_calling_utils import kernel_function_metadata_to_function_call_format +from semantic_kernel.connectors.ai.function_calling_utils import ( + kernel_function_metadata_to_function_call_format, + merge_function_results, +) from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent @@ -47,6 +53,16 @@ logger: logging.Logger = logging.getLogger(__name__) +@experimental_class +@dataclass +class FunctionActionResult: + """Function Action Result.""" + + function_call_content: ChatMessageContent | None + function_result_content: ChatMessageContent | None + tool_outputs: list[dict[str, 
str]] | None + + @experimental_class class OpenAIAssistantBase(Agent): """OpenAI Assistant Base class. @@ -598,7 +614,7 @@ async def invoke( truncation_message_count: int | None = None, temperature: float | None = None, top_p: float | None = None, - metadata: dict[str, str] | None = {}, + metadata: dict[str, str] | None = None, **kwargs: Any, ) -> AsyncIterable[ChatMessageContent]: """Invoke the chat assistant. @@ -655,7 +671,7 @@ async def _invoke_internal( truncation_message_count: int | None = None, temperature: float | None = None, top_p: float | None = None, - metadata: dict[str, str] | None = {}, + metadata: dict[str, str] | None = None, **kwargs: Any, ) -> AsyncIterable[tuple[bool, ChatMessageContent]]: """Internal invoke method. @@ -686,6 +702,9 @@ async def _invoke_internal( if self._is_deleted: raise AgentInitializationException("The assistant has been deleted.") + if metadata is None: + metadata = {} + self._check_if_deleted() tools = self._get_tools() @@ -722,8 +741,12 @@ async def _invoke_internal( run = await self._poll_run_status(run=run, thread_id=thread_id) if run.status in self.error_message_states: + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message raise AgentInvokeException( - f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}`" + f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " + f"with error: {error_message}" ) # Check if function calling required @@ -735,7 +758,7 @@ async def _invoke_internal( chat_history = ChatHistory() _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) - tool_outputs = self._format_tool_outputs(chat_history) + tool_outputs = self._format_tool_outputs(fccs, chat_history) await self.client.beta.threads.runs.submit_tool_outputs( run_id=run.id, thread_id=thread_id, @@ -779,11 +802,193 @@ async def _invoke_internal( ) if message: content = 
generate_message_content(self.name, message) - if len(content.items) > 0: + if content and len(content.items) > 0: message_count += 1 yield True, content processed_step_ids.add(completed_step.id) + async def invoke_stream( + self, + thread_id: str, + *, + messages: list[ChatMessageContent] | None = None, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | None = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Invoke the chat assistant with streaming.""" + async for content in self._invoke_internal_stream( + thread_id=thread_id, + messages=messages, + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + **kwargs, + ): + yield content + + async def _invoke_internal_stream( + self, + thread_id: str, + *, + messages: list[ChatMessageContent] | None = None, + ai_model_id: str | None = None, + enable_code_interpreter: bool | None = False, + enable_file_search: bool | None = False, + enable_json_response: bool | None = None, + max_completion_tokens: int | None = None, + max_prompt_tokens: int | None = None, + parallel_tool_calls_enabled: bool | None = True, + truncation_message_count: int | None = None, + temperature: float | None = None, + top_p: float | None = None, + metadata: dict[str, str] | 
None = None, + **kwargs: Any, + ) -> AsyncIterable[ChatMessageContent]: + """Internal invoke method with streaming.""" + if not self.assistant: + raise AgentInitializationException("The assistant has not been created.") + + if self._is_deleted: + raise AgentInitializationException("The assistant has been deleted.") + + if metadata is None: + metadata = {} + + tools = self._get_tools() + + run_options = self._generate_options( + ai_model_id=ai_model_id, + enable_code_interpreter=enable_code_interpreter, + enable_file_search=enable_file_search, + enable_json_response=enable_json_response, + max_completion_tokens=max_completion_tokens, + max_prompt_tokens=max_prompt_tokens, + parallel_tool_calls_enabled=parallel_tool_calls_enabled, + truncation_message_count=truncation_message_count, + temperature=temperature, + top_p=top_p, + metadata=metadata, + **kwargs, + ) + + # Filter out None values to avoid passing them as kwargs + run_options = {k: v for k, v in run_options.items() if v is not None} + + stream = self.client.beta.threads.runs.stream( + assistant_id=self.assistant.id, + thread_id=thread_id, + instructions=self.assistant.instructions, + tools=tools, # type: ignore + **run_options, + ) + + function_steps: dict[str, FunctionCallContent] = {} + active_messages: dict[str, RunStep] = {} + + while True: + async with stream as response_stream: + async for event in response_stream: + if event.event == "thread.run.created": + run = event.data + logger.info(f"Assistant run created with ID: {run.id}") + elif event.event == "thread.run.in_progress": + run = event.data + logger.info(f"Assistant run in progress with ID: {run.id}") + elif event.event == "thread.message.delta": + content = generate_streaming_message_content(self.name, event.data) + yield content + elif event.event == "thread.run.step.completed": + logger.info(f"Run step completed with ID: {event.data.id}") + if hasattr(event.data.step_details, "message_creation"): + message_id = 
event.data.step_details.message_creation.message_id + if message_id not in active_messages: + active_messages[message_id] = event.data + elif hasattr(event.data.step_details, "tool_calls"): + tool_content = generate_streaming_tools_content(self.name, event.data.step_details) + if tool_content: + yield tool_content + elif event.event == "thread.run.requires_action": + run = event.data + function_action_result = await self._handle_streaming_requires_action(run, function_steps) + if function_action_result is None: + raise AgentInvokeException( + f"Function call required but no function steps found for agent `{self.name}` " + f"thread: {thread_id}." + ) + if function_action_result.function_result_content and messages is not None: + messages.append(function_action_result.function_result_content) + if function_action_result.function_call_content: + if messages is not None: + messages.append(function_action_result.function_call_content) + stream = self.client.beta.threads.runs.submit_tool_outputs_stream( + run_id=run.id, + thread_id=thread_id, + tool_outputs=function_action_result.tool_outputs, # type: ignore + ) + break + elif event.event == "thread.run.completed": + run = event.data + logger.info(f"Run completed with ID: {run.id}") + if len(active_messages) > 0: + for id in active_messages: + step: RunStep = active_messages[id] + message = await self._retrieve_message( + thread_id=thread_id, + message_id=id, # type: ignore + ) + + if message and message.content: + content = generate_message_content(self.name, message, step) + if messages is not None: + messages.append(content) + return + elif event.event == "thread.run.failed": + run = event.data # type: ignore + error_message = "" + if run.last_error and run.last_error.message: + error_message = run.last_error.message + raise AgentInvokeException( + f"Run failed with status: `{run.status}` for agent `{self.name}` and thread `{thread_id}` " + f"with error: {error_message}" + ) + else: + # If the inner loop completes 
without encountering a 'break', exit the outer loop + break + + async def _handle_streaming_requires_action( + self, run: Run, function_steps: dict[str, FunctionCallContent] + ) -> FunctionActionResult | None: + fccs = get_function_call_contents(run, function_steps) + if fccs: + function_call_content = generate_function_call_content(agent_name=self.name, fccs=fccs) + + chat_history = ChatHistory() + _ = await self._invoke_function_calls(fccs=fccs, chat_history=chat_history) + + function_result_content = merge_function_results(chat_history.messages)[0] + + tool_outputs = self._format_tool_outputs(fccs, chat_history) + return FunctionActionResult(function_call_content, function_result_content, tool_outputs) + return None + # endregion # region Agent Helper Methods @@ -982,22 +1187,27 @@ async def _invoke_function_calls(self, fccs: list[FunctionCallContent], chat_his ] return await asyncio.gather(*tasks) - def _format_tool_outputs(self, chat_history: ChatHistory) -> list[dict[str, str]]: + def _format_tool_outputs(self, fccs: list[FunctionCallContent], chat_history: ChatHistory) -> list[dict[str, str]]: """Format tool outputs from chat history for submission. Args: + fccs: The function call contents. chat_history: The chat history. Returns: The formatted tool outputs as a list of dictionaries. 
""" - tool_outputs = [] - for tool_call in chat_history.messages[0].items: - if isinstance(tool_call, FunctionResultContent): - tool_outputs.append({ - "tool_call_id": tool_call.id, - "output": tool_call.result, - }) - return tool_outputs + tool_call_lookup = { + tool_call.id: tool_call + for message in chat_history.messages + for tool_call in message.items + if isinstance(tool_call, FunctionResultContent) + } + + return [ + {"tool_call_id": fcc.id, "output": str(tool_call_lookup[fcc.id].result)} + for fcc in fccs + if fcc.id in tool_call_lookup + ] # endregion diff --git a/python/semantic_kernel/connectors/ai/anthropic/prompt_execution_settings/anthropic_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/anthropic/prompt_execution_settings/anthropic_prompt_execution_settings.py index 792afbe370dd..51bd4f4f7dcb 100644 --- a/python/semantic_kernel/connectors/ai/anthropic/prompt_execution_settings/anthropic_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/anthropic/prompt_execution_settings/anthropic_prompt_execution_settings.py @@ -5,7 +5,9 @@ from pydantic import Field, model_validator +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.exceptions import ServiceInvalidExecutionSettingsError logger = logging.getLogger(__name__) @@ -22,16 +24,27 @@ class AnthropicChatPromptExecutionSettings(AnthropicPromptExecutionSettings): messages: list[dict[str, Any]] | None = None stream: bool | None = None system: str | None = None - max_tokens: int | None = Field(None, gt=0) + max_tokens: int = Field(default=1024, gt=0) temperature: float | None = Field(None, ge=0.0, le=2.0) stop_sequences: list[str] | None = None top_p: float | None = Field(None, ge=0.0, le=1.0) top_k: int | None = Field(None, ge=0) + tools: list[dict[str, Any]] | None = Field( + None, + max_length=64, + description=("Do 
not set this manually. It is set by the service based on the function choice configuration."), + ) + tool_choice: dict[str, str] | None = Field( + None, + description="Do not set this manually. It is set by the service based on the function choice configuration.", + ) @model_validator(mode="after") - def check_function_call_behavior(self) -> "AnthropicChatPromptExecutionSettings": - """Check if the user is requesting function call behavior.""" - if self.function_choice_behavior is not None: - raise NotImplementedError("Anthropic does not support function call behavior.") - + def validate_tool_choice(self) -> "AnthropicChatPromptExecutionSettings": + """Validate tool choice. Anthropic doesn't support NONE tool choice.""" + tool_choice = self.tool_choice + + if tool_choice and tool_choice.get("type") == FunctionChoiceType.NONE.value: + raise ServiceInvalidExecutionSettingsError("Tool choice 'none' is not supported by Anthropic.") + return self diff --git a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py index 3b9a7de99182..54557ee8bb02 100644 --- a/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/anthropic/services/anthropic_chat_completion.py @@ -1,17 +1,25 @@ # Copyright (c) Microsoft. All rights reserved. 
+import json import logging +import sys from collections.abc import AsyncGenerator -from typing import Any +from typing import Any, ClassVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover from anthropic import AsyncAnthropic +from anthropic.lib.streaming._types import TextEvent from anthropic.types import ( ContentBlockStopEvent, Message, - RawContentBlockDeltaEvent, RawMessageDeltaEvent, RawMessageStartEvent, TextBlock, + ToolUseBlock, ) from pydantic import ValidationError @@ -20,9 +28,13 @@ ) from semantic_kernel.connectors.ai.anthropic.settings.anthropic_settings import AnthropicSettings from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent @@ -31,9 +43,15 @@ from semantic_kernel.contents.utils.finish_reason import FinishReason as SemanticKernelFinishReason from semantic_kernel.exceptions.service_exceptions import ( ServiceInitializationError, + ServiceInvalidResponseError, ServiceResponseException, ) +from 
semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) # map finish reasons from Anthropic to Semantic Kernel ANTHROPIC_TO_SEMANTIC_KERNEL_FINISH_REASON_MAP = { @@ -49,8 +67,11 @@ class AnthropicChatCompletion(ChatCompletionClientBase): """Antropic ChatCompletion class.""" + MODEL_PROVIDER_NAME: ClassVar[str] = "anthropic" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + async_client: AsyncAnthropic - + def __init__( self, ai_model_id: str | None = None, @@ -68,10 +89,10 @@ def __init__( service_id: Service ID tied to the execution settings. api_key: The optional API key to use. If provided will override, the env vars or .env file value. - async_client: An existing client to use. + async_client: An existing client to use. env_file_path: Use the environment settings file as a fallback - to environment variables. - env_file_encoding: The encoding of the environment settings file. + to environment variables. + env_file_encoding: The encoding of the environment settings file. 
""" try: anthropic_settings = AnthropicSettings.create( @@ -82,7 +103,7 @@ def __init__( ) except ValidationError as ex: raise ServiceInitializationError("Failed to create Anthropic settings.", ex) from ex - + if not anthropic_settings.chat_model_id: raise ServiceInitializationError("The Anthropic chat model ID is required.") @@ -97,153 +118,318 @@ def __init__( ai_model_id=anthropic_settings.chat_model_id, ) - async def get_chat_message_contents( + # region Overriding base class methods + + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return AnthropicChatPromptExecutionSettings + + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + return str(self.async_client.base_url) + + @override + @trace_chat_completion(MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( self, chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list["ChatMessageContent"]: - """Executes a chat completion request and returns the result. + ) -> list["ChatMessageContent"]: + if not isinstance(settings, AnthropicChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, AnthropicChatPromptExecutionSettings) # nosec - Args: - chat_history: The chat history to use for the chat completion. - settings: The settings to use for the chat completion request. - kwargs: The optional arguments. + settings.ai_model_id = settings.ai_model_id or self.ai_model_id + settings.messages, parsed_system_message = self._prepare_chat_history_for_request(chat_history) + if settings.system is None and parsed_system_message is not None: + settings.system = parsed_system_message - Returns: - The completion result(s). 
- """ + return await self._send_chat_request(settings) + + @override + @trace_streaming_chat_completion(MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: if not isinstance(settings, AnthropicChatPromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, AnthropicChatPromptExecutionSettings) # nosec - if not settings.ai_model_id: - settings.ai_model_id = self.ai_model_id + settings.messages, parsed_system_message = self._prepare_chat_history_for_request(chat_history, stream=True) + settings.ai_model_id = settings.ai_model_id or self.ai_model_id + if settings.system is None and parsed_system_message is not None: + settings.system = parsed_system_message - settings.messages = self._prepare_chat_history_for_request(chat_history) - try: - response = await self.async_client.messages.create(**settings.prepare_settings_dict()) - except Exception as ex: - raise ServiceResponseException( - f"{type(self)} service failed to complete the prompt", - ex, - ) from ex - - metadata: dict[str, Any] = {"id": response.id} - # Check if usage exists and has a value, then add it to the metadata - if hasattr(response, "usage") and response.usage is not None: - metadata["usage"] = response.usage + response = self._send_chat_stream_request(settings) + if not isinstance(response, AsyncGenerator): + raise ServiceInvalidResponseError("Expected an AsyncGenerator response.") + + async for message in response: + yield message - return [self._create_chat_message_content(response, content_block, metadata) - for content_block in response.content] - - async def get_streaming_chat_message_contents( + def _prepare_chat_history_for_request( self, - chat_history: ChatHistory, - settings: PromptExecutionSettings, - **kwargs: Any, - ) -> 
AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Executes a streaming chat completion request and returns the result. + chat_history: "ChatHistory", + role_key: str = "role", + content_key: str = "content", + stream: bool = False, + ) -> tuple[list[dict[str, Any]], str | None]: + """Prepare the chat history for an Anthropic request. + + Allowing customization of the key names for role/author, and optionally overriding the role. Args: - chat_history: The chat history to use for the chat completion. - settings: The settings to use for the chat completion request. - kwargs: The optional arguments. + chat_history: The chat history to prepare. + role_key: The key name for the role/author. + content_key: The key name for the content/message. + stream: Whether the request is for a streaming chat. - Yields: - A stream of StreamingChatMessageContent. + Returns: + A tuple containing the prepared chat history and the first SYSTEM message content. """ - if not isinstance(settings, AnthropicChatPromptExecutionSettings): - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, AnthropicChatPromptExecutionSettings) # nosec + system_message_content = None + remaining_messages: list[dict[str, Any]] = [] + system_message_found = False + for message in chat_history.messages: + # Skip system messages after the first one is found + if message.role == AuthorRole.SYSTEM: + if not system_message_found: + system_message_content = message.content + system_message_found = True + elif message.role == AuthorRole.TOOL: + # if tool result message isn't the most recent message, add it to the remaining messages + if not remaining_messages or remaining_messages[-1][role_key] != AuthorRole.USER: + remaining_messages.append({ + role_key: AuthorRole.USER, + content_key: [], + }) - if not settings.ai_model_id: - settings.ai_model_id = self.ai_model_id + # add the tool result to the most recent message + tool_results_message = 
remaining_messages[-1] + for item in message.items: + if isinstance(item, FunctionResultContent): + tool_results_message["content"].append({ + "type": "tool_result", + "tool_use_id": item.id, + content_key: str(item.result), + }) + elif message.finish_reason == SemanticKernelFinishReason.TOOL_CALLS: + if not stream: + if not message.inner_content: + raise ServiceInvalidResponseError( + "Expected a message with an Anthropic Message as inner content." + ) - settings.messages = self._prepare_chat_history_for_request(chat_history) - try: - async with self.async_client.messages.stream(**settings.prepare_settings_dict()) as stream: - author_role = None - metadata: dict[str, Any] = {"usage": {}, "id": None} - content_block_idx = 0 - - async for stream_event in stream: - if isinstance(stream_event, RawMessageStartEvent): - author_role = stream_event.message.role - metadata["usage"]["input_tokens"] = stream_event.message.usage.input_tokens - metadata["id"] = stream_event.message.id - elif isinstance(stream_event, (RawContentBlockDeltaEvent, RawMessageDeltaEvent)): - yield [self._create_streaming_chat_message_content(stream_event, - content_block_idx, - author_role, - metadata)] - elif isinstance(stream_event, ContentBlockStopEvent): - content_block_idx += 1 + remaining_messages.append({ + role_key: AuthorRole.ASSISTANT, + content_key: [content_block.to_dict() for content_block in message.inner_content.content], + }) + else: + content: list[TextBlock | ToolUseBlock] = [] + # for remaining items, add them to the content + for item in message.items: + if isinstance(item, TextContent): + content.append(TextBlock(text=item.text, type="text")) + elif isinstance(item, FunctionCallContent): + item_arguments = ( + item.arguments if not isinstance(item.arguments, str) else json.loads(item.arguments) + ) - except Exception as ex: - raise ServiceResponseException( - f"{type(self)} service failed to complete the request", - ex, - ) from ex + content.append( + ToolUseBlock(id=item.id, 
input=item_arguments, name=item.name, type="tool_use") + ) + + remaining_messages.append({ + role_key: AuthorRole.ASSISTANT, + content_key: content, + }) + else: + # The API requires only role and content keys for the remaining messages + remaining_messages.append({ + role_key: getattr(message, role_key), + content_key: getattr(message, content_key), + }) + + return remaining_messages, system_message_content + + # endregion def _create_chat_message_content( - self, - response: Message, - content: TextBlock, - response_metadata: dict[str, Any] + self, response: Message, response_metadata: dict[str, Any] ) -> "ChatMessageContent": """Create a chat message content object.""" items: list[ITEM_TYPES] = [] - - if content.text: - items.append(TextContent(text=content.text)) + items += self._get_tool_calls_from_message(response) + + for content_block in response.content: + if isinstance(content_block, TextBlock): + items.append(TextContent(text=content_block.text)) finish_reason = None if response.stop_reason: finish_reason = ANTHROPIC_TO_SEMANTIC_KERNEL_FINISH_REASON_MAP[response.stop_reason] - + return ChatMessageContent( inner_content=response, ai_model_id=self.ai_model_id, metadata=response_metadata, - role=AuthorRole(response.role), + role=AuthorRole.ASSISTANT, items=items, finish_reason=finish_reason, ) def _create_streaming_chat_message_content( - self, - stream_event: RawContentBlockDeltaEvent | RawMessageDeltaEvent, - content_block_idx: int, - role: str | None = None, - metadata: dict[str, Any] = {} + self, + stream_event: TextEvent | ContentBlockStopEvent | RawMessageDeltaEvent, + metadata: dict[str, Any] = {}, ) -> StreamingChatMessageContent: - """Create a streaming chat message content object from a choice.""" - text_content = "" - - if stream_event.delta and hasattr(stream_event.delta, "text"): - text_content = stream_event.delta.text - - items: list[STREAMING_ITEM_TYPES] = [StreamingTextContent(choice_index=content_block_idx, text=text_content)] - + 
"""Create a streaming chat message content object from a content block.""" + items: list[STREAMING_ITEM_TYPES] = [] finish_reason = None - if isinstance(stream_event, RawMessageDeltaEvent): - if stream_event.delta.stop_reason: - finish_reason = ANTHROPIC_TO_SEMANTIC_KERNEL_FINISH_REASON_MAP[stream_event.delta.stop_reason] + if isinstance(stream_event, TextEvent): + items.append(StreamingTextContent(choice_index=0, text=stream_event.text)) + elif ( + isinstance(stream_event, ContentBlockStopEvent) + and hasattr(stream_event, "content_block") + and stream_event.content_block.type == "tool_use" + ): + tool_use_block = stream_event.content_block + items.append( + FunctionCallContent( + id=tool_use_block.id, + index=stream_event.index, + name=tool_use_block.name, + arguments=json.dumps(tool_use_block.input) if tool_use_block.input else None, + ) + ) + elif isinstance(stream_event, RawMessageDeltaEvent): + finish_reason = ANTHROPIC_TO_SEMANTIC_KERNEL_FINISH_REASON_MAP[str(stream_event.delta.stop_reason)] metadata["usage"]["output_tokens"] = stream_event.usage.output_tokens return StreamingChatMessageContent( - choice_index=content_block_idx, + choice_index=0, inner_content=stream_event, ai_model_id=self.ai_model_id, metadata=metadata, - role=AuthorRole(role) if role else AuthorRole.ASSISTANT, + role=AuthorRole.ASSISTANT, finish_reason=finish_reason, items=items, ) - def get_prompt_execution_settings_class(self) -> "type[AnthropicChatPromptExecutionSettings]": - """Create a request settings object.""" - return AnthropicChatPromptExecutionSettings - + def update_settings_from_function_call_configuration_anthropic( + self, + function_choice_configuration: FunctionCallChoiceConfiguration, + settings: "PromptExecutionSettings", + type: "FunctionChoiceType", + ) -> None: + """Update the settings from a FunctionChoiceConfiguration.""" + if ( + function_choice_configuration.available_functions + and hasattr(settings, "tools") + and hasattr(settings, "tool_choice") + ): + 
settings.tools = [ + self.kernel_function_metadata_to_function_call_format_anthropic(f) + for f in function_choice_configuration.available_functions + ] + + if ( + settings.function_choice_behavior + and settings.function_choice_behavior.type_ == FunctionChoiceType.REQUIRED + ) or type == FunctionChoiceType.REQUIRED: + settings.tool_choice = {"type": "any"} + else: + settings.tool_choice = {"type": type.value} + + def kernel_function_metadata_to_function_call_format_anthropic( + self, + metadata: KernelFunctionMetadata, + ) -> dict[str, Any]: + """Convert the kernel function metadata to function calling format.""" + return { + "name": metadata.fully_qualified_name, + "description": metadata.description or "", + "input_schema": { + "type": "object", + "properties": {p.name: p.schema_data for p in metadata.parameters}, + "required": [p.name for p in metadata.parameters if p.is_required], + }, + } + + @override + def _update_function_choice_settings_callback(self): + return self.update_settings_from_function_call_configuration_anthropic + + async def _send_chat_request(self, settings: AnthropicChatPromptExecutionSettings) -> list["ChatMessageContent"]: + """Send the chat request.""" + try: + response = await self.async_client.messages.create(**settings.prepare_settings_dict()) + except Exception as ex: + raise ServiceResponseException( + f"{type(self)} service failed to complete the request", + ex, + ) from ex + + response_metadata: dict[str, Any] = {"id": response.id} + if hasattr(response, "usage") and response.usage is not None: + response_metadata["usage"] = response.usage + + return [self._create_chat_message_content(response, response_metadata)] + + async def _send_chat_stream_request( + self, settings: AnthropicChatPromptExecutionSettings + ) -> AsyncGenerator[list["StreamingChatMessageContent"], None]: + """Send the chat stream request. 
+ + The stream yields a sequence of stream events, which are used to create streaming chat message content: + - RawMessageStartEvent is used to determine the message id and input tokens. + - RawMessageDeltaEvent is used to determine the finish reason. + - TextEvent is used to determine the text content and ContentBlockStopEvent is used to determine + the tool use content. + """ + try: + async with self.async_client.messages.stream(**settings.prepare_settings_dict()) as stream: + metadata: dict[str, Any] = {"usage": {}, "id": None} + async for stream_event in stream: + if isinstance(stream_event, RawMessageStartEvent): + metadata["usage"]["input_tokens"] = stream_event.message.usage.input_tokens + metadata["id"] = stream_event.message.id + elif isinstance(stream_event, (TextEvent, RawMessageDeltaEvent)) or ( + isinstance(stream_event, ContentBlockStopEvent) + and stream_event.content_block.type == "tool_use" + ): + yield [self._create_streaming_chat_message_content(stream_event, metadata)] + except Exception as ex: + raise ServiceResponseException( + f"{type(self)} service failed to complete the request", + ex, + ) from ex + + def _get_tool_calls_from_message(self, message: Message) -> list[FunctionCallContent]: + """Get tool calls from a content blocks.""" + tool_calls: list[FunctionCallContent] = [] + + for idx, content_block in enumerate(message.content): + if isinstance(content_block, ToolUseBlock): + tool_calls.append( + FunctionCallContent( + id=content_block.id, + index=idx, + name=content_block.name, + arguments=getattr(content_block, "input", None), + ) + ) + + return tool_calls + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_choice"): + settings.tool_choice = None + if hasattr(settings, "tools"): + settings.tools = None diff --git a/python/semantic_kernel/connectors/ai/anthropic/settings/anthropic_settings.py 
b/python/semantic_kernel/connectors/ai/anthropic/settings/anthropic_settings.py index 4c4b01a352ee..f69addbdc02e 100644 --- a/python/semantic_kernel/connectors/ai/anthropic/settings/anthropic_settings.py +++ b/python/semantic_kernel/connectors/ai/anthropic/settings/anthropic_settings.py @@ -16,9 +16,9 @@ class AnthropicSettings(KernelBaseSettings): however, validation will fail alerting that the settings are missing. Optional settings for prefix 'ANTHROPIC_' are: - - api_key: ANTHROPIC API key, see https://console.mistral.ai/api-keys + - api_key: ANTHROPIC API key, see https://console.anthropic.com/settings/keys (Env var ANTHROPIC_API_KEY) - - chat_model_id: The Anthropic chat model ID to use see https://docs.mistral.ai/getting-started/models/. + - chat_model_id: The Anthropic chat model ID to use see https://docs.anthropic.com/en/docs/about-claude/models. (Env var ANTHROPIC_CHAT_MODEL_ID) - env_file_path: if provided, the .env settings are read from this file path location """ diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py index 32550fa71697..3d64c38ce5bc 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_base.py @@ -3,6 +3,7 @@ import asyncio import contextlib from abc import ABC +from typing import ClassVar from azure.ai.inference.aio import ChatCompletionsClient, EmbeddingsClient @@ -14,6 +15,8 @@ class AzureAIInferenceBase(KernelBaseModel, ABC): """Azure AI Inference Chat Completion Service.""" + MODEL_PROVIDER_NAME: ClassVar[str] = "azureai" + client: ChatCompletionsClient | EmbeddingsClient def __del__(self) -> None: diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py 
b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py index bcccd2c28d7f..090faa9873db 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_chat_completion.py @@ -1,13 +1,9 @@ # Copyright (c) Microsoft. All rights reserved. -import asyncio import logging import sys -from collections.abc import AsyncGenerator -from functools import reduce -from typing import TYPE_CHECKING, Any - -from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT +from collections.abc import AsyncGenerator, Callable +from typing import TYPE_CHECKING, Any, ClassVar if sys.version_info >= (3, 12): from typing import override # pragma: no cover @@ -25,6 +21,7 @@ StreamingChatCompletionsUpdate, ) from azure.core.credentials import AzureKeyCredential +from azure.identity import DefaultAzureCredential from pydantic import ValidationError from semantic_kernel.connectors.ai.azure_ai_inference import ( @@ -34,8 +31,11 @@ from semantic_kernel.connectors.ai.azure_ai_inference.services.azure_ai_inference_base import AzureAIInferenceBase from semantic_kernel.connectors.ai.azure_ai_inference.services.utils import MESSAGE_CONVERTERS from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration from semantic_kernel.connectors.ai.function_calling_utils import update_settings_from_function_call_configuration -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from 
semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent @@ -49,9 +49,12 @@ ServiceInitializationError, ServiceInvalidExecutionSettingsError, ) -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) +from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -63,6 +66,8 @@ class AzureAIInferenceChatCompletion(ChatCompletionClientBase, AzureAIInferenceBase): """Azure AI Inference Chat Completion Service.""" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + def __init__( self, ai_model_id: str, @@ -103,11 +108,24 @@ def __init__( except ValidationError as e: raise ServiceInitializationError(f"Failed to validate Azure AI Inference settings: {e}") from e - client = ChatCompletionsClient( - endpoint=str(azure_ai_inference_settings.endpoint), - credential=AzureKeyCredential(azure_ai_inference_settings.api_key.get_secret_value()), - user_agent=SEMANTIC_KERNEL_USER_AGENT, - ) + endpoint_to_use: str = str(azure_ai_inference_settings.endpoint) + if azure_ai_inference_settings.api_key is not None: + client = ChatCompletionsClient( + endpoint=endpoint_to_use, + credential=AzureKeyCredential(azure_ai_inference_settings.api_key.get_secret_value()), + user_agent=SEMANTIC_KERNEL_USER_AGENT, + ) + else: + # Try to create the client with a DefaultAzureCredential + client = ( + ChatCompletionsClient( + endpoint=endpoint_to_use, + credential=DefaultAzureCredential(), + 
credential_scopes=["https://cognitiveservices.azure.com/.default"], + api_version=DEFAULT_AZURE_API_VERSION, + user_agent=SEMANTIC_KERNEL_USER_AGENT, + ), + ) super().__init__( ai_model_id=ai_model_id, @@ -115,67 +133,32 @@ def __init__( client=client, ) - # region Non-streaming - async def get_chat_message_contents( - self, - chat_history: ChatHistory, - settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list[ChatMessageContent]: - """Get chat message contents from the Azure AI Inference service. + # region Overriding base class methods - Args: - chat_history: A list of chats in a chat_history object. - settings: Settings for the request. - kwargs: Optional arguments. - - Returns: - A list of chat message contents. - """ - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, AzureAIInferenceChatPromptExecutionSettings) # nosec + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return AzureAIInferenceChatPromptExecutionSettings - kernel = kwargs.get("kernel") - if settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - self._verify_function_choice_behavior(settings) - self._configure_function_choice_behavior(settings, kernel) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - return await self._send_chat_request(chat_history, settings) - - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - completions = await self._send_chat_request(chat_history, settings) - chat_history.add_message(message=completions[0]) - function_calls = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionCallContent)] 
- if (fc_count := len(function_calls)) == 0: - return completions - - results = await self._invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, # type: ignore - arguments=kwargs.get("arguments", None), - function_call_count=fc_count, - request_index=request_index, - function_behavior=settings.function_choice_behavior, - ) + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + if hasattr(self.client, "_client") and hasattr(self.client._client, "_base_url"): + # Best effort to get the endpoint + return self.client._client._base_url + return None - if any(result.terminate for result in results if result is not None): - return completions - else: - # do a final call without auto function calling - return await self._send_chat_request(chat_history, settings) + @override + @trace_chat_completion(AzureAIInferenceBase.MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> list["ChatMessageContent"]: + if not isinstance(settings, AzureAIInferenceChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, AzureAIInferenceChatPromptExecutionSettings) # nosec - async def _send_chat_request( - self, chat_history: ChatHistory, settings: AzureAIInferenceChatPromptExecutionSettings - ) -> list[ChatMessageContent]: - """Send a chat request to the Azure AI Inference service.""" assert isinstance(self.client, ChatCompletionsClient) # nosec response: ChatCompletions = await self.client.complete( messages=self._prepare_chat_history_for_request(chat_history), @@ -186,6 +169,78 @@ async def _send_chat_request( return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices] + @override + @trace_streaming_chat_completion(AzureAIInferenceBase.MODEL_PROVIDER_NAME) + async def 
_inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + if not isinstance(settings, AzureAIInferenceChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, AzureAIInferenceChatPromptExecutionSettings) # nosec + + assert isinstance(self.client, ChatCompletionsClient) # nosec + response: AsyncStreamingChatCompletions = await self.client.complete( + stream=True, + messages=self._prepare_chat_history_for_request(chat_history), + model_extras=settings.extra_parameters, + **settings.prepare_settings_dict(), + ) + + async for chunk in response: + if len(chunk.choices) == 0: + continue + chunk_metadata = self._get_metadata_from_response(chunk) + yield [ + self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices + ] + + @override + def _verify_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if not isinstance(settings, AzureAIInferenceChatPromptExecutionSettings): + raise ServiceInvalidExecutionSettingsError( + "The settings must be an AzureAIInferenceChatPromptExecutionSettings." + ) + if settings.extra_parameters is not None and settings.extra_parameters.get("n", 1) > 1: + # Currently only OpenAI models allow multiple completions but the Azure AI Inference service + # does not expose the functionality directly. If users want to have more than 1 responses, they + # need to configure `extra_parameters` with a key of "n" and a value greater than 1. + raise ServiceInvalidExecutionSettingsError( + "Auto invocation of tool calls may only be used with a single completion." 
+ ) + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return update_settings_from_function_call_configuration + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_choice"): + settings.tool_choice = None + if hasattr(settings, "tools"): + settings.tools = None + + @override + def _prepare_chat_history_for_request( + self, + chat_history: ChatHistory, + role_key: str = "role", + content_key: str = "content", + ) -> list[ChatRequestMessage]: + chat_request_messages: list[ChatRequestMessage] = [] + + for message in chat_history.messages: + chat_request_messages.append(MESSAGE_CONVERTERS[message.role](message)) + + return chat_request_messages + + # endregion + + # region Non-streaming + def _create_chat_message_content( self, response: ChatCompletions, choice: ChatChoice, metadata: dict[str, Any] ) -> ChatMessageContent: @@ -230,114 +285,6 @@ def _create_chat_message_content( # endregion # region Streaming - async def get_streaming_chat_message_contents( - self, - chat_history: ChatHistory, - settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Get streaming chat message contents from the Azure AI Inference service. - - Args: - chat_history: A list of chats in a chat_history object. - settings: Settings for the request. - kwargs: Optional arguments. - - Returns: - A list of chat message contents. 
- """ - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, AzureAIInferenceChatPromptExecutionSettings) # nosec - - kernel = kwargs.get("kernel") - if settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - self._verify_function_choice_behavior(settings) - self._configure_function_choice_behavior(settings, kernel) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - # No auto invoke is required. - async_generator = self._send_chat_streaming_request(chat_history, settings) - else: - # Auto invoke is required. - async_generator = self._get_streaming_chat_message_contents_auto_invoke( - kernel, # type: ignore - kwargs.get("arguments"), - chat_history, - settings, - ) - - async for messages in async_generator: - yield messages - - async def _get_streaming_chat_message_contents_auto_invoke( - self, - kernel: Kernel, - arguments: KernelArguments | None, - chat_history: ChatHistory, - settings: AzureAIInferenceChatPromptExecutionSettings, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Get streaming chat message contents from the Azure AI Inference service with auto invoking functions.""" - # mypy doesn't recognize the settings.function_choice_behavior is not None by the check above - request_attempts = settings.function_choice_behavior.maximum_auto_invoke_attempts # type: ignore - - for request_index in range(request_attempts): - all_messages: list[StreamingChatMessageContent] = [] - function_call_returned = False - async for messages in self._send_chat_streaming_request(chat_history, settings): - for message in messages: - if message: - all_messages.append(message) - if any(isinstance(item, FunctionCallContent) for item in 
message.items): - function_call_returned = True - yield messages - - if not function_call_returned: - # Response doesn't contain any function calls. No need to proceed to the next request. - return - - full_completion: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_messages) - function_calls = [item for item in full_completion.items if isinstance(item, FunctionCallContent)] - chat_history.add_message(message=full_completion) - - results = await self._invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, - arguments=arguments, - function_call_count=len(function_calls), - request_index=request_index, - # mypy doesn't recognize the settings.function_choice_behavior is not None by the check above - function_behavior=settings.function_choice_behavior, # type: ignore - ) - - if any(result.terminate for result in results if result is not None): - return - - async def _send_chat_streaming_request( - self, chat_history: ChatHistory, settings: AzureAIInferenceChatPromptExecutionSettings - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Send a streaming chat request to the Azure AI Inference service.""" - assert isinstance(self.client, ChatCompletionsClient) # nosec - response: AsyncStreamingChatCompletions = await self.client.complete( - stream=True, - messages=self._prepare_chat_history_for_request(chat_history), - model_extras=settings.extra_parameters, - **settings.prepare_settings_dict(), - ) - - async for chunk in response: - if len(chunk.choices) == 0: - continue - chunk_metadata = self._get_metadata_from_response(chunk) - yield [ - self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices - ] def _create_streaming_chat_message_content( self, @@ -388,20 +335,6 @@ def _create_streaming_chat_message_content( # endregion - @override - def _prepare_chat_history_for_request( - self, - chat_history: ChatHistory, - role_key: str = "role", - content_key: 
str = "content", - ) -> list[ChatRequestMessage]: - chat_request_messages: list[ChatRequestMessage] = [] - - for message in chat_history.messages: - chat_request_messages.append(MESSAGE_CONVERTERS[message.role](message)) - - return chat_request_messages - def _get_metadata_from_response(self, response: ChatCompletions | StreamingChatCompletionsUpdate) -> dict[str, Any]: """Get metadata from the response. @@ -415,60 +348,10 @@ def _get_metadata_from_response(self, response: ChatCompletions | StreamingChatC "id": response.id, "model": response.model, "created": response.created, - "usage": response.usage, - } - - def _verify_function_choice_behavior(self, settings: AzureAIInferenceChatPromptExecutionSettings): - """Verify the function choice behavior.""" - if settings.extra_parameters is not None and settings.extra_parameters.get("n", 1) > 1: - # Currently only OpenAI models allow multiple completions but the Azure AI Inference service - # does not expose the functionality directly. If users want to have more than 1 responses, they - # need to configure `extra_parameters` with a key of "n" and a value greater than 1. - raise ServiceInvalidExecutionSettingsError( - "Auto invocation of tool calls may only be used with a single completion." 
+ "usage": CompletionUsage( + prompt_tokens=response.usage.prompt_tokens, + completion_tokens=response.usage.completion_tokens, ) - - def _configure_function_choice_behavior( - self, settings: AzureAIInferenceChatPromptExecutionSettings, kernel: Kernel - ): - """Configure the function choice behavior to include the kernel functions.""" - if not settings.function_choice_behavior: - return - - settings.function_choice_behavior.configure( - kernel=kernel, update_settings_callback=update_settings_from_function_call_configuration, settings=settings - ) - - async def _invoke_function_calls( - self, - function_calls: list[FunctionCallContent], - chat_history: ChatHistory, - kernel: Kernel, - arguments: KernelArguments | None, - function_call_count: int, - request_index: int, - function_behavior: FunctionChoiceBehavior, - ): - """Invoke function calls.""" - logger.info(f"processing {function_call_count} tool calls in parallel.") - - return await asyncio.gather( - *[ - kernel.invoke_function_call( - function_call=function_call, - chat_history=chat_history, - arguments=arguments, - function_call_count=function_call_count, - request_index=request_index, - function_behavior=function_behavior, - ) - for function_call in function_calls - ], - ) - - @override - def get_prompt_execution_settings_class( - self, - ) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return AzureAIInferenceChatPromptExecutionSettings + if response.usage + else None, + } diff --git a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py index 882449da7dfe..f6bb693bb6c8 100644 --- a/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/azure_ai_inference/services/azure_ai_inference_text_embedding.py @@ -14,12 +14,15 @@ else: from 
typing_extensions import override # pragma: no cover +from azure.identity import DefaultAzureCredential + from semantic_kernel.connectors.ai.azure_ai_inference.azure_ai_inference_prompt_execution_settings import ( AzureAIInferenceEmbeddingPromptExecutionSettings, ) from semantic_kernel.connectors.ai.azure_ai_inference.azure_ai_inference_settings import AzureAIInferenceSettings from semantic_kernel.connectors.ai.azure_ai_inference.services.azure_ai_inference_base import AzureAIInferenceBase from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.utils.experimental_decorator import experimental_class from semantic_kernel.utils.telemetry.user_agent import SEMANTIC_KERNEL_USER_AGENT @@ -72,11 +75,22 @@ def __init__( except ValidationError as e: raise ServiceInitializationError(f"Failed to validate Azure AI Inference settings: {e}") from e - client = EmbeddingsClient( - endpoint=str(azure_ai_inference_settings.endpoint), - credential=AzureKeyCredential(azure_ai_inference_settings.api_key.get_secret_value()), - user_agent=SEMANTIC_KERNEL_USER_AGENT, - ) + endpoint = str(azure_ai_inference_settings.endpoint) + if azure_ai_inference_settings.api_key is not None: + client = EmbeddingsClient( + endpoint=endpoint, + credential=AzureKeyCredential(azure_ai_inference_settings.api_key.get_secret_value()), + user_agent=SEMANTIC_KERNEL_USER_AGENT, + ) + else: + # Try to create the client with a DefaultAzureCredential + client = EmbeddingsClient( + endpoint=endpoint, + credential=DefaultAzureCredential(), + credential_scopes=["https://cognitiveservices.azure.com/.default"], + api_version=DEFAULT_AZURE_API_VERSION, + user_agent=SEMANTIC_KERNEL_USER_AGENT, + ) super().__init__( ai_model_id=ai_model_id, diff --git 
a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py index 4fca8ae2906f..4531e9b5fe6d 100644 --- a/python/semantic_kernel/connectors/ai/chat_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/chat_completion_client_base.py @@ -1,24 +1,85 @@ # Copyright (c) Microsoft. All rights reserved. -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator -from typing import TYPE_CHECKING, Any +import asyncio +import copy +import logging +from abc import ABC +from collections.abc import AsyncGenerator, Callable +from functools import reduce +from typing import TYPE_CHECKING, Any, ClassVar +from opentelemetry.trace import Span, Tracer, get_tracer, use_span + +from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_calling_utils import merge_function_results +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior, FunctionChoiceType +from semantic_kernel.const import AUTO_FUNCTION_INVOCATION_SPAN_NAME from semantic_kernel.contents.annotation_content import AnnotationContent from semantic_kernel.contents.file_reference_content import FileReferenceContent +from semantic_kernel.contents.function_call_content import FunctionCallContent +from semantic_kernel.exceptions.service_exceptions import ServiceInvalidExecutionSettingsError from semantic_kernel.services.ai_service_client_base import AIServiceClientBase +from semantic_kernel.utils.telemetry.model_diagnostics.gen_ai_attributes import AVAILABLE_FUNCTIONS if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import 
ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent + from semantic_kernel.kernel import Kernel + +logger: logging.Logger = logging.getLogger(__name__) +tracer: Tracer = get_tracer(__name__) class ChatCompletionClientBase(AIServiceClientBase, ABC): """Base class for chat completion AI services.""" - @abstractmethod + # Connectors that support function calling should set this to True + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = False + + # region Internal methods to be implemented by the derived classes + + async def _inner_get_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> list["ChatMessageContent"]: + """Send a chat request to the AI service. + + Args: + chat_history (ChatHistory): The chat history to send. + settings (PromptExecutionSettings): The settings for the request. + + Returns: + chat_message_contents (list[ChatMessageContent]): The chat message contents representing the response(s). + """ + raise NotImplementedError("The _inner_get_chat_message_contents method is not implemented.") + + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + """Send a streaming chat request to the AI service. + + Args: + chat_history (ChatHistory): The chat history to send. + settings (PromptExecutionSettings): The settings for the request. + + Yields: + streaming_chat_message_contents (list[StreamingChatMessageContent]): The streaming chat message contents. 
+ """ + raise NotImplementedError("The _inner_get_streaming_chat_message_contents method is not implemented.") + # Below is needed for mypy: https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators + if False: + yield + + # endregion + + # region Public methods + async def get_chat_message_contents( self, chat_history: "ChatHistory", @@ -36,7 +97,80 @@ async def get_chat_message_contents( Returns: A list of chat message contents representing the response(s) from the LLM. """ - pass + # Create a copy of the settings to avoid modifying the original settings + settings = copy.deepcopy(settings) + + if not self.SUPPORTS_FUNCTION_CALLING: + return await self._inner_get_chat_message_contents(chat_history, settings) + + # For backwards compatibility we need to convert the `FunctionCallBehavior` to `FunctionChoiceBehavior` + # if this method is called with a `FunctionCallBehavior` object as part of the settings + if hasattr(settings, "function_call_behavior") and isinstance( + settings.function_call_behavior, FunctionCallBehavior + ): + settings.function_choice_behavior = FunctionChoiceBehavior.from_function_call_behavior( + settings.function_call_behavior + ) + + kernel = kwargs.get("kernel", None) + if settings.function_choice_behavior is not None: + if kernel is None: + raise ServiceInvalidExecutionSettingsError("The kernel is required for function calls.") + self._verify_function_choice_settings(settings) + + if settings.function_choice_behavior and kernel: + # Configure the function choice behavior into the settings object + # that will become part of the request to the AI service + settings.function_choice_behavior.configure( + kernel=kernel, + update_settings_callback=self._update_function_choice_settings_callback(), + settings=settings, + ) + + if ( + settings.function_choice_behavior is None + or not settings.function_choice_behavior.auto_invoke_kernel_functions + ): + return await self._inner_get_chat_message_contents(chat_history, settings) + 
+ # Auto invoke loop + with use_span(self._start_auto_function_invocation_activity(kernel, settings), end_on_exit=True) as _: + for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): + completions = await self._inner_get_chat_message_contents(chat_history, settings) + # Get the function call contents from the chat message. There is only one chat message, + # which should be checked in the `_verify_function_choice_settings` method. + function_calls = [item for item in completions[0].items if isinstance(item, FunctionCallContent)] + if (fc_count := len(function_calls)) == 0: + return completions + + # Since we have a function call, add the assistant's tool call message to the history + chat_history.add_message(message=completions[0]) + + logger.info(f"processing {fc_count} tool calls in parallel.") + + # This function either updates the chat history with the function call results + # or returns the context, with terminate set to True in which case the loop will + # break and the function calls are returned. + results = await asyncio.gather( + *[ + kernel.invoke_function_call( + function_call=function_call, + chat_history=chat_history, + arguments=kwargs.get("arguments", None), + function_call_count=fc_count, + request_index=request_index, + function_behavior=settings.function_choice_behavior, + ) + for function_call in function_calls + ], + ) + + if any(result.terminate for result in results if result is not None): + return merge_function_results(chat_history.messages[-len(results) :]) + else: + # Do a final call, without function calling when the max has been reached. 
+ self._reset_function_choice_settings(settings) + return await self._inner_get_chat_message_contents(chat_history, settings) async def get_chat_message_content( self, chat_history: "ChatHistory", settings: "PromptExecutionSettings", **kwargs: Any @@ -58,8 +192,7 @@ async def get_chat_message_content( # this should not happen, should error out before returning an empty list return None # pragma: no cover - @abstractmethod - def get_streaming_chat_message_contents( + async def get_streaming_chat_message_contents( self, chat_history: "ChatHistory", settings: "PromptExecutionSettings", @@ -76,7 +209,97 @@ def get_streaming_chat_message_contents( Yields: A stream representing the response(s) from the LLM. """ - ... + # Create a copy of the settings to avoid modifying the original settings + settings = copy.deepcopy(settings) + + if not self.SUPPORTS_FUNCTION_CALLING: + async for streaming_chat_message_contents in self._inner_get_streaming_chat_message_contents( + chat_history, settings + ): + yield streaming_chat_message_contents + return + + # For backwards compatibility we need to convert the `FunctionCallBehavior` to `FunctionChoiceBehavior` + # if this method is called with a `FunctionCallBehavior` object as part of the settings + if hasattr(settings, "function_call_behavior") and isinstance( + settings.function_call_behavior, FunctionCallBehavior + ): + settings.function_choice_behavior = FunctionChoiceBehavior.from_function_call_behavior( + settings.function_call_behavior + ) + + kernel = kwargs.get("kernel", None) + if settings.function_choice_behavior is not None: + if kernel is None: + raise ServiceInvalidExecutionSettingsError("The kernel is required for function calls.") + self._verify_function_choice_settings(settings) + + if settings.function_choice_behavior and kernel: + # Configure the function choice behavior into the settings object + # that will become part of the request to the AI service + settings.function_choice_behavior.configure( + 
kernel=kernel, + update_settings_callback=self._update_function_choice_settings_callback(), + settings=settings, + ) + + if ( + settings.function_choice_behavior is None + or not settings.function_choice_behavior.auto_invoke_kernel_functions + ): + async for streaming_chat_message_contents in self._inner_get_streaming_chat_message_contents( + chat_history, settings + ): + yield streaming_chat_message_contents + return + + # Auto invoke loop + with use_span(self._start_auto_function_invocation_activity(kernel, settings), end_on_exit=True) as _: + for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): + # Hold the messages, if there are more than one response, it will not be used, so we flatten + all_messages: list["StreamingChatMessageContent"] = [] + function_call_returned = False + async for messages in self._inner_get_streaming_chat_message_contents(chat_history, settings): + for msg in messages: + if msg is not None: + all_messages.append(msg) + if any(isinstance(item, FunctionCallContent) for item in msg.items): + function_call_returned = True + yield messages + + if not function_call_returned: + return + + # There is one FunctionCallContent response stream in the messages, combining now to create + # the full completion depending on the prompt, the message may contain both function call + # content and others + full_completion: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_messages) + function_calls = [item for item in full_completion.items if isinstance(item, FunctionCallContent)] + chat_history.add_message(message=full_completion) + + fc_count = len(function_calls) + logger.info(f"processing {fc_count} tool calls in parallel.") + + # This function either updates the chat history with the function call results + # or returns the context, with terminate set to True in which case the loop will + # break and the function calls are returned. 
+ results = await asyncio.gather( + *[ + kernel.invoke_function_call( + function_call=function_call, + chat_history=chat_history, + arguments=kwargs.get("arguments", None), + function_call_count=fc_count, + request_index=request_index, + function_behavior=settings.function_choice_behavior, + ) + for function_call in function_calls + ], + ) + + if any(result.terminate for result in results if result is not None): + yield merge_function_results(chat_history.messages[-len(results) :]) # type: ignore + break async def get_streaming_chat_message_content( self, @@ -104,6 +327,10 @@ async def get_streaming_chat_message_content( # this should not happen, should error out before returning an empty list yield None # pragma: no cover + # endregion + + # region internal handlers + def _prepare_chat_history_for_request( self, chat_history: "ChatHistory", @@ -133,3 +360,53 @@ def _prepare_chat_history_for_request( for message in chat_history.messages if not isinstance(message, (AnnotationContent, FileReferenceContent)) ] + + def _verify_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + """Additional verification to validate settings for function choice behavior. + + Override this method to add additional verification for the settings. + + Args: + settings (PromptExecutionSettings): The settings to verify. + """ + return + + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + """Return the callback function to update the settings from a function call configuration. + + Override this method to provide a custom callback function to + update the settings from a function call configuration. + """ + return lambda configuration, settings, choice_type: None + + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + """Reset the settings updated by `_update_function_choice_settings_callback`. 
+ + Override this method to reset the settings updated by `_update_function_choice_settings_callback`. + + Args: + settings (PromptExecutionSettings): The prompt execution settings to reset. + """ + return + + def _start_auto_function_invocation_activity(self, kernel: "Kernel", settings: "PromptExecutionSettings") -> Span: + """Start the auto function invocation activity. + + Args: + kernel (Kernel): The kernel instance. + settings (PromptExecutionSettings): The prompt execution settings. + """ + span = tracer.start_span(AUTO_FUNCTION_INVOCATION_SPAN_NAME) + + if settings.function_choice_behavior is not None: + available_functions = settings.function_choice_behavior.get_config(kernel).available_functions or [] + span.set_attribute( + AVAILABLE_FUNCTIONS, + ",".join([f.fully_qualified_name for f in available_functions]), + ) + + return span + + # endregion diff --git a/python/semantic_kernel/connectors/ai/completion_usage.py b/python/semantic_kernel/connectors/ai/completion_usage.py new file mode 100644 index 000000000000..6998ed7cd602 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/completion_usage.py @@ -0,0 +1,20 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from openai.types import CompletionUsage as OpenAICompletionUsage + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class CompletionUsage(KernelBaseModel): + """Completion usage information.""" + + prompt_tokens: int | None = None + completion_tokens: int | None = None + + @classmethod + def from_openai(cls, openai_completion_usage: OpenAICompletionUsage): + """Create a CompletionUsage object from an OpenAI response.""" + return cls( + prompt_tokens=openai_completion_usage.prompt_tokens, + completion_tokens=openai_completion_usage.completion_tokens, + ) diff --git a/python/semantic_kernel/connectors/ai/function_calling_utils.py b/python/semantic_kernel/connectors/ai/function_calling_utils.py index 70240b45710f..6ed5ed5a6b38 100644 --- a/python/semantic_kernel/connectors/ai/function_calling_utils.py +++ b/python/semantic_kernel/connectors/ai/function_calling_utils.py @@ -3,10 +3,16 @@ from collections import OrderedDict from typing import TYPE_CHECKING, Any +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_result_content import FunctionResultContent +from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError if TYPE_CHECKING: - from semantic_kernel.connectors.ai.function_choice_behavior import FunctionCallChoiceConfiguration + from semantic_kernel.connectors.ai.function_choice_behavior import ( + FunctionCallChoiceConfiguration, + FunctionChoiceType, + ) from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata @@ -14,7 +20,7 @@ def update_settings_from_function_call_configuration( function_choice_configuration: "FunctionCallChoiceConfiguration", settings: "PromptExecutionSettings", - type: str, + type: "FunctionChoiceType", ) -> None: """Update the settings from a 
FunctionChoiceConfiguration.""" if ( @@ -68,3 +74,22 @@ def _combine_filter_dicts(*dicts: dict[str, list[str]]) -> dict: combined_filters[key] = list(combined_functions.keys()) return combined_filters + + +def merge_function_results( + messages: list[ChatMessageContent], +) -> list[ChatMessageContent]: + """Combine multiple function result content types to one chat message content type. + + This method combines the FunctionResultContent items from separate ChatMessageContent messages, + and is used in the event that the `context.terminate = True` condition is met. + """ + items: list[Any] = [] + for message in messages: + items.extend([item for item in message.items if isinstance(item, FunctionResultContent)]) + return [ + ChatMessageContent( + role=AuthorRole.TOOL, + items=items, + ) + ] diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_base.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_base.py index 91446835302d..5bbc19568bc1 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_base.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_base.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
from abc import ABC +from typing import ClassVar from semantic_kernel.connectors.ai.google.google_ai.google_ai_settings import GoogleAISettings from semantic_kernel.kernel_pydantic import KernelBaseModel @@ -9,4 +10,6 @@ class GoogleAIBase(KernelBaseModel, ABC): """Google AI Service.""" + MODEL_PROVIDER_NAME: ClassVar[str] = "googleai" + service_settings: GoogleAISettings diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py index 8b72915b1b82..10c4e7de3fd4 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_chat_completion.py @@ -3,9 +3,8 @@ import logging import sys -from collections.abc import AsyncGenerator -from functools import reduce -from typing import TYPE_CHECKING, Any +from collections.abc import AsyncGenerator, Callable +from typing import TYPE_CHECKING, Any, ClassVar import google.generativeai as genai from google.generativeai import GenerativeModel @@ -13,9 +12,14 @@ from google.generativeai.types import AsyncGenerateContentResponse, GenerateContentResponse, GenerationConfig from pydantic import ValidationError +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.google.google_ai.google_ai_prompt_execution_settings import ( GoogleAIChatPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.google.google_ai.google_ai_settings import GoogleAISettings from semantic_kernel.connectors.ai.google.google_ai.services.google_ai_base 
import GoogleAIBase from semantic_kernel.connectors.ai.google.google_ai.services.utils import ( finish_reason_from_google_ai_to_semantic_kernel, @@ -25,11 +29,11 @@ update_settings_from_function_choice_configuration, ) from semantic_kernel.connectors.ai.google.shared_utils import ( - configure_function_choice_behavior, filter_system_message, format_gemini_function_name_to_kernel_function_fully_qualified_name, - invoke_function_calls, ) +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.streaming_chat_message_content import ITEM_TYPES as STREAMING_ITEM_TYPES from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent @@ -37,32 +41,31 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase -from semantic_kernel.connectors.ai.google.google_ai.google_ai_settings import GoogleAISettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ITEM_TYPES, ChatMessageContent from semantic_kernel.exceptions.service_exceptions import ( ServiceInitializationError, ServiceInvalidExecutionSettingsError, ) +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) if TYPE_CHECKING: from 
semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + logger: logging.Logger = logging.getLogger(__name__) class GoogleAIChatCompletion(GoogleAIBase, ChatCompletionClientBase): """Google AI Chat Completion Client.""" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + def __init__( self, gemini_model_id: str | None = None, @@ -106,57 +109,50 @@ def __init__( service_settings=google_ai_settings, ) - # region Non-streaming + # region Overriding base class methods + + # Override from AIServiceClientBase @override - async def get_chat_message_contents( + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return GoogleAIChatPromptExecutionSettings + + @override + @trace_chat_completion(GoogleAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list[ChatMessageContent]: - settings = self.get_prompt_execution_settings_from_settings(settings) + ) -> list["ChatMessageContent"]: + if not isinstance(settings, GoogleAIChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec - kernel = kwargs.get("kernel") - if settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - configure_function_choice_behavior(settings, kernel, update_settings_from_function_choice_configuration) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - return await 
self._send_chat_request(chat_history, settings) - - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - completions = await self._send_chat_request(chat_history, settings) - chat_history.add_message(message=completions[0]) - function_calls = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionCallContent)] - if (fc_count := len(function_calls)) == 0: - return completions - - results = await invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, # type: ignore - arguments=kwargs.get("arguments", None), - function_call_count=fc_count, - request_index=request_index, - function_behavior=settings.function_choice_behavior, - ) + genai.configure(api_key=self.service_settings.api_key.get_secret_value()) + model = GenerativeModel( + self.service_settings.gemini_model_id, + system_instruction=filter_system_message(chat_history), + ) + + response: AsyncGenerateContentResponse = await model.generate_content_async( + contents=self._prepare_chat_history_for_request(chat_history), + generation_config=GenerationConfig(**settings.prepare_settings_dict()), + tools=settings.tools, + tool_config=settings.tool_config, + ) + + return [self._create_chat_message_content(response, candidate) for candidate in response.candidates] - if any(result.terminate for result in results if result is not None): - return completions - else: - # do a final call without auto function calling - return await self._send_chat_request(chat_history, settings) + @override + @trace_streaming_chat_completion(GoogleAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + if not isinstance(settings, GoogleAIChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert 
isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec - async def _send_chat_request( - self, chat_history: ChatHistory, settings: GoogleAIChatPromptExecutionSettings - ) -> list[ChatMessageContent]: - """Send a chat request to the Google AI service.""" genai.configure(api_key=self.service_settings.api_key.get_secret_value()) model = GenerativeModel( self.service_settings.gemini_model_id, @@ -168,9 +164,61 @@ async def _send_chat_request( generation_config=GenerationConfig(**settings.prepare_settings_dict()), tools=settings.tools, tool_config=settings.tool_config, + stream=True, ) - return [self._create_chat_message_content(response, candidate) for candidate in response.candidates] + async for chunk in response: + yield [self._create_streaming_chat_message_content(chunk, candidate) for candidate in chunk.candidates] + + @override + def _verify_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if not isinstance(settings, GoogleAIChatPromptExecutionSettings): + raise ServiceInvalidExecutionSettingsError("The settings must be an GoogleAIChatPromptExecutionSettings.") + if settings.candidate_count is not None and settings.candidate_count > 1: + raise ServiceInvalidExecutionSettingsError( + "Auto-invocation of tool calls may only be used with a " + "GoogleAIChatPromptExecutionSettings.candidate_count of 1." 
+ ) + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return update_settings_from_function_choice_configuration + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_config"): + settings.tool_config = None + if hasattr(settings, "tools"): + settings.tools = None + + @override + def _prepare_chat_history_for_request( + self, + chat_history: ChatHistory, + role_key: str = "role", + content_key: str = "content", + ) -> list[Content]: + chat_request_messages: list[Content] = [] + + for message in chat_history.messages: + if message.role == AuthorRole.SYSTEM: + # Skip system messages since they are not part of the chat request. + # System message will be provided as system_instruction in the model. + continue + if message.role == AuthorRole.USER: + chat_request_messages.append(Content(role="user", parts=format_user_message(message))) + elif message.role == AuthorRole.ASSISTANT: + chat_request_messages.append(Content(role="model", parts=format_assistant_message(message))) + elif message.role == AuthorRole.TOOL: + chat_request_messages.append(Content(role="function", parts=format_tool_message(message))) + + return chat_request_messages + + # endregion + + # region Non-streaming def _create_chat_message_content( self, response: AsyncGenerateContentResponse, candidate: Candidate @@ -216,108 +264,6 @@ def _create_chat_message_content( # endregion # region Streaming - @override - async def get_streaming_chat_message_contents( - self, - chat_history: ChatHistory, - settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec - - kernel = kwargs.get("kernel") - if 
settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - configure_function_choice_behavior(settings, kernel, update_settings_from_function_choice_configuration) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - # No auto invoke is required. - async_generator = self._send_chat_streaming_request(chat_history, settings) - else: - # Auto invoke is required. - async_generator = self._get_streaming_chat_message_contents_auto_invoke( - kernel, # type: ignore - kwargs.get("arguments"), - chat_history, - settings, - ) - - async for messages in async_generator: - yield messages - - async def _get_streaming_chat_message_contents_auto_invoke( - self, - kernel: Kernel, - arguments: KernelArguments | None, - chat_history: ChatHistory, - settings: GoogleAIChatPromptExecutionSettings, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Get streaming chat message contents from the Google AI service with auto invoking functions.""" - if not settings.function_choice_behavior: - raise ServiceInvalidExecutionSettingsError( - "Function choice behavior is required for auto invoking functions." - ) - - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - all_messages: list[StreamingChatMessageContent] = [] - function_call_returned = False - async for messages in self._send_chat_streaming_request(chat_history, settings): - for message in messages: - if message: - all_messages.append(message) - if any(isinstance(item, FunctionCallContent) for item in message.items): - function_call_returned = True - yield messages - - if not function_call_returned: - # Response doesn't contain any function calls. No need to proceed to the next request. 
- return - - full_completion: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_messages) - function_calls = [item for item in full_completion.items if isinstance(item, FunctionCallContent)] - chat_history.add_message(message=full_completion) - - results = await invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, - arguments=arguments, - function_call_count=len(function_calls), - request_index=request_index, - function_behavior=settings.function_choice_behavior, - ) - - if any(result.terminate for result in results if result is not None): - return - - async def _send_chat_streaming_request( - self, - chat_history: ChatHistory, - settings: GoogleAIChatPromptExecutionSettings, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Send a streaming chat request to the Google AI service.""" - genai.configure(api_key=self.service_settings.api_key.get_secret_value()) - model = GenerativeModel( - self.service_settings.gemini_model_id, - system_instruction=filter_system_message(chat_history), - ) - - response: AsyncGenerateContentResponse = await model.generate_content_async( - contents=self._prepare_chat_history_for_request(chat_history), - generation_config=GenerationConfig(**settings.prepare_settings_dict()), - tools=settings.tools, - tool_config=settings.tool_config, - stream=True, - ) - - async for chunk in response: - yield [self._create_streaming_chat_message_content(chunk, candidate) for candidate in chunk.candidates] def _create_streaming_chat_message_content( self, @@ -372,29 +318,6 @@ def _create_streaming_chat_message_content( # endregion - @override - def _prepare_chat_history_for_request( - self, - chat_history: ChatHistory, - role_key: str = "role", - content_key: str = "content", - ) -> list[Content]: - chat_request_messages: list[Content] = [] - - for message in chat_history.messages: - if message.role == AuthorRole.SYSTEM: - # Skip system messages since they are not part of the chat 
request. - # System message will be provided as system_instruction in the model. - continue - if message.role == AuthorRole.USER: - chat_request_messages.append(Content(role="user", parts=format_user_message(message))) - elif message.role == AuthorRole.ASSISTANT: - chat_request_messages.append(Content(role="model", parts=format_assistant_message(message))) - elif message.role == AuthorRole.TOOL: - chat_request_messages.append(Content(role="function", parts=format_tool_message(message))) - - return chat_request_messages - def _get_metadata_from_response( self, response: AsyncGenerateContentResponse | GenerateContentResponse ) -> dict[str, Any]: @@ -408,7 +331,10 @@ def _get_metadata_from_response( """ return { "prompt_feedback": response.prompt_feedback, - "usage": response.usage_metadata, + "usage": CompletionUsage( + prompt_tokens=response.usage_metadata.prompt_token_count, + completion_tokens=response.usage_metadata.candidates_token_count, + ), } def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: @@ -426,10 +352,3 @@ def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: "safety_ratings": candidate.safety_ratings, "token_count": candidate.token_count, } - - @override - def get_prompt_execution_settings_class( - self, - ) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return GoogleAIChatPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py index a38201db6b67..078361e8d70c 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/google_ai_text_completion.py @@ -10,25 +10,29 @@ from google.generativeai.types import AsyncGenerateContentResponse, GenerateContentResponse, GenerationConfig from pydantic 
import ValidationError +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.google.google_ai.google_ai_prompt_execution_settings import ( GoogleAITextPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.google.google_ai.google_ai_settings import GoogleAISettings from semantic_kernel.connectors.ai.google.google_ai.services.google_ai_base import GoogleAIBase from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase - -if sys.version_info >= (3, 12): - from typing import override # pragma: no cover -else: - from typing_extensions import override # pragma: no cover - -from semantic_kernel.connectors.ai.google.google_ai.google_ai_settings import GoogleAISettings from semantic_kernel.contents import TextContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_streaming_text_completion, + trace_text_completion, +) if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + class GoogleAITextCompletion(GoogleAIBase, TextCompletionClientBase): """Google AI Text Completion Client.""" @@ -76,20 +80,24 @@ def __init__( service_settings=google_ai_settings, ) - # region Non-streaming + # region Overriding base class methods + + # Override from AIServiceClientBase @override - async def get_text_contents( + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return GoogleAITextPromptExecutionSettings + + @override + @trace_text_completion(GoogleAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_text_contents( self, prompt: str, settings: 
"PromptExecutionSettings", ) -> list[TextContent]: - settings = self.get_prompt_execution_settings_from_settings(settings) + if not isinstance(settings, GoogleAITextPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec - return await self._send_request(prompt, settings) - - async def _send_request(self, prompt: str, settings: GoogleAITextPromptExecutionSettings) -> list[TextContent]: - """Send a text generation request to the Google AI service.""" genai.configure(api_key=self.service_settings.api_key.get_secret_value()) model = GenerativeModel( self.service_settings.gemini_model_id, @@ -102,47 +110,17 @@ async def _send_request(self, prompt: str, settings: GoogleAITextPromptExecution return [self._create_text_content(response, candidate) for candidate in response.candidates] - def _create_text_content(self, response: AsyncGenerateContentResponse, candidate: Candidate) -> TextContent: - """Create a text content object. - - Args: - response: The response from the service. - candidate: The candidate from the response. - - Returns: - A text content object. 
- """ - response_metadata = self._get_metadata_from_response(response) - response_metadata.update(self._get_metadata_from_candidate(candidate)) - - return TextContent( - ai_model_id=self.ai_model_id, - text=candidate.content.parts[0].text, - inner_content=response, - metadata=response_metadata, - ) - - # endregion - - # region Streaming @override - async def get_streaming_text_contents( + @trace_streaming_text_completion(GoogleAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_text_contents( self, prompt: str, settings: "PromptExecutionSettings", - ) -> AsyncGenerator[list["StreamingTextContent"], Any]: - settings = self.get_prompt_execution_settings_from_settings(settings) + ) -> AsyncGenerator[list[StreamingTextContent], Any]: + if not isinstance(settings, GoogleAITextPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, GoogleAITextPromptExecutionSettings) # nosec - async_generator = self._send_streaming_request(prompt, settings) - - async for text_contents in async_generator: - yield text_contents - - async def _send_streaming_request( - self, prompt: str, settings: GoogleAITextPromptExecutionSettings - ) -> AsyncGenerator[list[StreamingTextContent], Any]: - """Send a text generation request to the Google AI service.""" genai.configure(api_key=self.service_settings.api_key.get_secret_value()) model = GenerativeModel( self.service_settings.gemini_model_id, @@ -157,6 +135,28 @@ async def _send_streaming_request( async for chunk in response: yield [self._create_streaming_text_content(chunk, candidate) for candidate in chunk.candidates] + # endregion + + def _create_text_content(self, response: AsyncGenerateContentResponse, candidate: Candidate) -> TextContent: + """Create a text content object. + + Args: + response: The response from the service. + candidate: The candidate from the response. + + Returns: + A text content object. 
+ """ + response_metadata = self._get_metadata_from_response(response) + response_metadata.update(self._get_metadata_from_candidate(candidate)) + + return TextContent( + ai_model_id=self.ai_model_id, + text=candidate.content.parts[0].text, + inner_content=response, + metadata=response_metadata, + ) + def _create_streaming_text_content( self, chunk: GenerateContentResponse, candidate: Candidate ) -> StreamingTextContent: @@ -180,8 +180,6 @@ def _create_streaming_text_content( metadata=response_metadata, ) - # endregion - def _get_metadata_from_response( self, response: AsyncGenerateContentResponse | GenerateContentResponse, @@ -196,7 +194,10 @@ def _get_metadata_from_response( """ return { "prompt_feedback": response.prompt_feedback, - "usage": response.usage_metadata, + "usage": CompletionUsage( + prompt_tokens=response.usage_metadata.prompt_token_count, + completion_tokens=response.usage_metadata.candidates_token_count, + ), } def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: @@ -214,10 +215,3 @@ def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: "safety_ratings": candidate.safety_ratings, "token_count": candidate.token_count, } - - @override - def get_prompt_execution_settings_class( - self, - ) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return GoogleAITextPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/google/google_ai/services/utils.py b/python/semantic_kernel/connectors/ai/google/google_ai/services/utils.py index abbd5bf1281d..c2b28e54d5c2 100644 --- a/python/semantic_kernel/connectors/ai/google/google_ai/services/utils.py +++ b/python/semantic_kernel/connectors/ai/google/google_ai/services/utils.py @@ -16,6 +16,7 @@ format_function_result_content_name_to_gemini_function_name, format_kernel_function_fully_qualified_name_to_gemini_function_name, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from 
semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent @@ -125,7 +126,7 @@ def format_tool_message(message: ChatMessageContent) -> list[Part]: name=gemini_function_name, response={ "name": gemini_function_name, - "content": item.result, + "content": str(item.result), }, ) ) @@ -149,10 +150,12 @@ def kernel_function_metadata_to_google_ai_function_call_format(metadata: KernelF def update_settings_from_function_choice_configuration( function_choice_configuration: FunctionCallChoiceConfiguration, - settings: GoogleAIChatPromptExecutionSettings, + settings: PromptExecutionSettings, type: FunctionChoiceType, ) -> None: """Update the settings from a FunctionChoiceConfiguration.""" + assert isinstance(settings, GoogleAIChatPromptExecutionSettings) # nosec + if function_choice_configuration.available_functions: settings.tool_config = { "function_calling_config": { diff --git a/python/semantic_kernel/connectors/ai/google/shared_utils.py b/python/semantic_kernel/connectors/ai/google/shared_utils.py index 5e8686d4397f..8401d5df20a4 100644 --- a/python/semantic_kernel/connectors/ai/google/shared_utils.py +++ b/python/semantic_kernel/connectors/ai/google/shared_utils.py @@ -1,27 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. 
-import asyncio import logging -from collections.abc import Callable -from typing import TYPE_CHECKING -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior, FunctionChoiceType +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.service_exceptions import ServiceInvalidRequestError -from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata -from semantic_kernel.kernel import Kernel - -if TYPE_CHECKING: - from semantic_kernel.connectors.ai.google.google_ai.google_ai_prompt_execution_settings import ( - GoogleAIChatPromptExecutionSettings, - ) - from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_prompt_execution_settings import ( - VertexAIChatPromptExecutionSettings, - ) logger: logging.Logger = logging.getLogger(__name__) @@ -44,33 +30,6 @@ def filter_system_message(chat_history: ChatHistory) -> str | None: return None -async def invoke_function_calls( - function_calls: list[FunctionCallContent], - chat_history: ChatHistory, - kernel: Kernel, - arguments: KernelArguments | None, - function_call_count: int, - request_index: int, - function_behavior: FunctionChoiceBehavior, -): - """Invoke function calls.""" - logger.info(f"processing {function_call_count} tool calls in parallel.") - - return await asyncio.gather( - *[ - kernel.invoke_function_call( - function_call=function_call, - chat_history=chat_history, - arguments=arguments, - function_call_count=function_call_count, - request_index=request_index, - function_behavior=function_behavior, - ) - for function_call in 
function_calls - ], - ) - - FUNCTION_CHOICE_TYPE_TO_GOOGLE_FUNCTION_CALLING_MODE = { FunctionChoiceType.AUTO: "AUTO", FunctionChoiceType.NONE: "NONE", @@ -107,15 +66,3 @@ def format_gemini_function_name_to_kernel_function_fully_qualified_name(gemini_f plugin_name, function_name = gemini_function_name.split(GEMINI_FUNCTION_NAME_SEPARATOR, 1) return f"{plugin_name}-{function_name}" return gemini_function_name - - -def configure_function_choice_behavior( - settings: "GoogleAIChatPromptExecutionSettings | VertexAIChatPromptExecutionSettings", - kernel: Kernel, - callback: Callable[..., None], -): - """Configure the function choice behavior to include the kernel functions.""" - if not settings.function_choice_behavior: - return - - settings.function_choice_behavior.configure(kernel=kernel, update_settings_callback=callback, settings=settings) diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/utils.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/utils.py index 400329688331..f36cb15e9fe6 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/utils.py +++ b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/utils.py @@ -2,7 +2,6 @@ import json import logging -from typing import Any from google.cloud.aiplatform_v1beta1.types.content import Blob, Candidate, Part from google.cloud.aiplatform_v1beta1.types.tool import FunctionCall, FunctionResponse @@ -18,6 +17,7 @@ from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_prompt_execution_settings import ( VertexAIChatPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent @@ -127,7 +127,7 @@ def format_tool_message(message: ChatMessageContent) -> 
list[Part]: name=gemini_function_name, response={ "name": gemini_function_name, - "content": item.result, + "content": str(item.result), }, ) ) @@ -136,7 +136,7 @@ def format_tool_message(message: ChatMessageContent) -> list[Part]: return parts -def kernel_function_metadata_to_vertex_ai_function_call_format(metadata: KernelFunctionMetadata) -> dict[str, Any]: +def kernel_function_metadata_to_vertex_ai_function_call_format(metadata: KernelFunctionMetadata) -> FunctionDeclaration: """Convert the kernel function metadata to function calling format.""" return FunctionDeclaration( name=format_kernel_function_fully_qualified_name_to_gemini_function_name(metadata), @@ -151,10 +151,12 @@ def kernel_function_metadata_to_vertex_ai_function_call_format(metadata: KernelF def update_settings_from_function_choice_configuration( function_choice_configuration: FunctionCallChoiceConfiguration, - settings: VertexAIChatPromptExecutionSettings, + settings: PromptExecutionSettings, type: FunctionChoiceType, ) -> None: """Update the settings from a FunctionChoiceConfiguration.""" + assert isinstance(settings, VertexAIChatPromptExecutionSettings) # nosec + if function_choice_configuration.available_functions: settings.tool_config = ToolConfig( function_calling_config=ToolConfig.FunctionCallingConfig( diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_base.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_base.py index e17b1994424d..29e5d2502b63 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_base.py +++ b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_base.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. 
from abc import ABC +from typing import ClassVar from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_settings import VertexAISettings from semantic_kernel.kernel_pydantic import KernelBaseModel @@ -9,4 +10,6 @@ class VertexAIBase(KernelBaseModel, ABC): """Vertex AI Service.""" + MODEL_PROVIDER_NAME: ClassVar[str] = "vertexai" + service_settings: VertexAISettings diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py index 245f9a434e45..45d66396ff34 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_chat_completion.py @@ -1,20 +1,21 @@ # Copyright (c) Microsoft. All rights reserved. import sys -from collections.abc import AsyncGenerator, AsyncIterable -from functools import reduce -from typing import Any +from collections.abc import AsyncGenerator, AsyncIterable, Callable +from typing import Any, ClassVar import vertexai from google.cloud.aiplatform_v1beta1.types.content import Content from pydantic import ValidationError from vertexai.generative_models import Candidate, GenerationResponse, GenerativeModel +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.google.shared_utils import ( - configure_function_choice_behavior, filter_system_message, format_gemini_function_name_to_kernel_function_fully_qualified_name, - invoke_function_calls, ) from semantic_kernel.connectors.ai.google.vertex_ai.services.utils import ( 
finish_reason_from_vertex_ai_to_semantic_kernel, @@ -42,20 +43,22 @@ ServiceInitializationError, ServiceInvalidExecutionSettingsError, ) -from semantic_kernel.functions.kernel_arguments import KernelArguments -from semantic_kernel.kernel import Kernel +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover -from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase - class VertexAIChatCompletion(VertexAIBase, ChatCompletionClientBase): """Google Vertex AI Chat Completion Service.""" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True + def __init__( self, project_id: str | None = None, @@ -100,57 +103,24 @@ def __init__( service_settings=vertex_ai_settings, ) - # region Non-streaming + # region Overriding base class methods + + # Override from AIServiceClientBase @override - async def get_chat_message_contents( + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return VertexAIChatPromptExecutionSettings + + @override + @trace_chat_completion(VertexAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list[ChatMessageContent]: - settings = self.get_prompt_execution_settings_from_settings(settings) + ) -> list["ChatMessageContent"]: + if not isinstance(settings, VertexAIChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, VertexAIChatPromptExecutionSettings) # nosec - kernel = kwargs.get("kernel") - if settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is 
required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - configure_function_choice_behavior(settings, kernel, update_settings_from_function_choice_configuration) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - return await self._send_chat_request(chat_history, settings) - - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - completions = await self._send_chat_request(chat_history, settings) - chat_history.add_message(message=completions[0]) - function_calls = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionCallContent)] - if (fc_count := len(function_calls)) == 0: - return completions - - results = await invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, # type: ignore - arguments=kwargs.get("arguments", None), - function_call_count=fc_count, - request_index=request_index, - function_behavior=settings.function_choice_behavior, - ) - - if any(result.terminate for result in results if result is not None): - return completions - else: - # do a final call without auto function calling - return await self._send_chat_request(chat_history, settings) - - async def _send_chat_request( - self, chat_history: ChatHistory, settings: VertexAIChatPromptExecutionSettings - ) -> list[ChatMessageContent]: - """Send a chat request to the Vertex AI service.""" vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) model = GenerativeModel( self.service_settings.gemini_model_id, @@ -166,6 +136,84 @@ async def _send_chat_request( return [self._create_chat_message_content(response, candidate) for candidate in response.candidates] + @override + @trace_streaming_chat_completion(VertexAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + 
settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + if not isinstance(settings, VertexAIChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, VertexAIChatPromptExecutionSettings) # nosec + + vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) + model = GenerativeModel( + self.service_settings.gemini_model_id, + system_instruction=filter_system_message(chat_history), + ) + + response: AsyncIterable[GenerationResponse] = await model.generate_content_async( + contents=self._prepare_chat_history_for_request(chat_history), + generation_config=settings.prepare_settings_dict(), + tools=settings.tools, + tool_config=settings.tool_config, + stream=True, + ) + + async for chunk in response: + yield [self._create_streaming_chat_message_content(chunk, candidate) for candidate in chunk.candidates] + + @override + def _verify_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if not isinstance(settings, VertexAIChatPromptExecutionSettings): + raise ServiceInvalidExecutionSettingsError("The settings must be an VertexAIChatPromptExecutionSettings.") + if settings.candidate_count is not None and settings.candidate_count > 1: + raise ServiceInvalidExecutionSettingsError( + "Auto-invocation of tool calls may only be used with a " + "VertexAIChatPromptExecutionSettings.candidate_count of 1." 
+ ) + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return update_settings_from_function_choice_configuration + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_config"): + settings.tool_config = None + if hasattr(settings, "tools"): + settings.tools = None + + @override + def _prepare_chat_history_for_request( + self, + chat_history: ChatHistory, + role_key: str = "role", + content_key: str = "content", + ) -> list[Content]: + chat_request_messages: list[Content] = [] + + for message in chat_history.messages: + if message.role == AuthorRole.SYSTEM: + # Skip system messages since they are not part of the chat request. + # System message will be provided as system_instruction in the model. + continue + if message.role == AuthorRole.USER: + chat_request_messages.append(Content(role="user", parts=format_user_message(message))) + elif message.role == AuthorRole.ASSISTANT: + chat_request_messages.append(Content(role="model", parts=format_assistant_message(message))) + elif message.role == AuthorRole.TOOL: + chat_request_messages.append(Content(role="function", parts=format_tool_message(message))) + + return chat_request_messages + + # endregion + + # region Non-streaming + def _create_chat_message_content(self, response: GenerationResponse, candidate: Candidate) -> ChatMessageContent: """Create a chat message content object. 
@@ -209,108 +257,6 @@ def _create_chat_message_content(self, response: GenerationResponse, candidate: # endregion # region Streaming - @override - async def get_streaming_chat_message_contents( - self, - chat_history: ChatHistory, - settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, VertexAIChatPromptExecutionSettings) # nosec - - kernel = kwargs.get("kernel") - if settings.function_choice_behavior is not None and (not kernel or not isinstance(kernel, Kernel)): - raise ServiceInvalidExecutionSettingsError("Kernel is required for auto invoking functions.") - - if kernel and settings.function_choice_behavior: - configure_function_choice_behavior(settings, kernel, update_settings_from_function_choice_configuration) - - if ( - settings.function_choice_behavior is None - or not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - # No auto invoke is required. - async_generator = self._send_chat_streaming_request(chat_history, settings) - else: - # Auto invoke is required. - async_generator = self._get_streaming_chat_message_contents_auto_invoke( - kernel, # type: ignore - kwargs.get("arguments"), - chat_history, - settings, - ) - - async for messages in async_generator: - yield messages - - async def _get_streaming_chat_message_contents_auto_invoke( - self, - kernel: Kernel, - arguments: KernelArguments | None, - chat_history: ChatHistory, - settings: VertexAIChatPromptExecutionSettings, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Get streaming chat message contents from the Google AI service with auto invoking functions.""" - if not settings.function_choice_behavior: - raise ServiceInvalidExecutionSettingsError( - "Function choice behavior is required for auto invoking functions." 
- ) - - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - all_messages: list[StreamingChatMessageContent] = [] - function_call_returned = False - async for messages in self._send_chat_streaming_request(chat_history, settings): - for message in messages: - if message: - all_messages.append(message) - if any(isinstance(item, FunctionCallContent) for item in message.items): - function_call_returned = True - yield messages - - if not function_call_returned: - # Response doesn't contain any function calls. No need to proceed to the next request. - return - - full_completion: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_messages) - function_calls = [item for item in full_completion.items if isinstance(item, FunctionCallContent)] - chat_history.add_message(message=full_completion) - - results = await invoke_function_calls( - function_calls=function_calls, - chat_history=chat_history, - kernel=kernel, - arguments=arguments, - function_call_count=len(function_calls), - request_index=request_index, - function_behavior=settings.function_choice_behavior, - ) - - if any(result.terminate for result in results if result is not None): - return - - async def _send_chat_streaming_request( - self, - chat_history: ChatHistory, - settings: VertexAIChatPromptExecutionSettings, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Send a streaming chat request to the Vertex AI service.""" - vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) - model = GenerativeModel( - self.service_settings.gemini_model_id, - system_instruction=filter_system_message(chat_history), - ) - - response: AsyncIterable[GenerationResponse] = await model.generate_content_async( - contents=self._prepare_chat_history_for_request(chat_history), - generation_config=settings.prepare_settings_dict(), - tools=settings.tools, - tool_config=settings.tool_config, - stream=True, - ) - - async for chunk in 
response: - yield [self._create_streaming_chat_message_content(chunk, candidate) for candidate in chunk.candidates] def _create_streaming_chat_message_content( self, @@ -366,29 +312,6 @@ def _create_streaming_chat_message_content( # endregion - @override - def _prepare_chat_history_for_request( - self, - chat_history: ChatHistory, - role_key: str = "role", - content_key: str = "content", - ) -> list[Content]: - chat_request_messages: list[Content] = [] - - for message in chat_history.messages: - if message.role == AuthorRole.SYSTEM: - # Skip system messages since they are not part of the chat request. - # System message will be provided as system_instruction in the model. - continue - if message.role == AuthorRole.USER: - chat_request_messages.append(Content(role="user", parts=format_user_message(message))) - elif message.role == AuthorRole.ASSISTANT: - chat_request_messages.append(Content(role="model", parts=format_assistant_message(message))) - elif message.role == AuthorRole.TOOL: - chat_request_messages.append(Content(role="function", parts=format_tool_message(message))) - - return chat_request_messages - def _get_metadata_from_response(self, response: GenerationResponse) -> dict[str, Any]: """Get metadata from the response. 
@@ -400,7 +323,10 @@ def _get_metadata_from_response(self, response: GenerationResponse) -> dict[str, """ return { "prompt_feedback": response.prompt_feedback, - "usage": response.usage_metadata, + "usage": CompletionUsage( + prompt_tokens=response.usage_metadata.prompt_token_count, + completion_tokens=response.usage_metadata.candidates_token_count, + ), } def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: @@ -417,10 +343,3 @@ def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: "finish_reason": candidate.finish_reason, "safety_ratings": candidate.safety_ratings, } - - @override - def get_prompt_execution_settings_class( - self, - ) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return VertexAIChatPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_text_completion.py b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_text_completion.py index 6919b6ba521e..f759a026295b 100644 --- a/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_text_completion.py +++ b/python/semantic_kernel/connectors/ai/google/vertex_ai/services/vertex_ai_text_completion.py @@ -9,6 +9,7 @@ from pydantic import ValidationError from vertexai.generative_models import Candidate, GenerationResponse, GenerativeModel +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.google.vertex_ai.services.vertex_ai_base import VertexAIBase from semantic_kernel.connectors.ai.google.vertex_ai.vertex_ai_prompt_execution_settings import ( VertexAITextPromptExecutionSettings, @@ -19,6 +20,10 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from 
semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_streaming_text_completion, + trace_text_completion, +) if sys.version_info >= (3, 12): from typing import override # pragma: no cover @@ -72,20 +77,24 @@ def __init__( service_settings=vertex_ai_settings, ) - # region Non-streaming + # region Overriding base class methods + + # Override from AIServiceClientBase @override - async def get_text_contents( + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return VertexAITextPromptExecutionSettings + + @override + @trace_text_completion(VertexAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_text_contents( self, prompt: str, settings: "PromptExecutionSettings", ) -> list[TextContent]: - settings = self.get_prompt_execution_settings_from_settings(settings) + if not isinstance(settings, VertexAITextPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, VertexAITextPromptExecutionSettings) # nosec - return await self._send_request(prompt, settings) - - async def _send_request(self, prompt: str, settings: VertexAITextPromptExecutionSettings) -> list[TextContent]: - """Send a text generation request to the Vertex AI service.""" vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) model = GenerativeModel(self.service_settings.gemini_model_id) @@ -96,6 +105,31 @@ async def _send_request(self, prompt: str, settings: VertexAITextPromptExecution return [self._create_text_content(response, candidate) for candidate in response.candidates] + @override + @trace_streaming_text_completion(VertexAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_text_contents( + self, + prompt: str, + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list[StreamingTextContent], Any]: + if not isinstance(settings, VertexAITextPromptExecutionSettings): + settings = 
self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, VertexAITextPromptExecutionSettings) # nosec + + vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) + model = GenerativeModel(self.service_settings.gemini_model_id) + + response: AsyncIterable[GenerationResponse] = await model.generate_content_async( + contents=prompt, + generation_config=settings.prepare_settings_dict(), + stream=True, + ) + + async for chunk in response: + yield [self._create_streaming_text_content(chunk, candidate) for candidate in chunk.candidates] + + # endregion + def _create_text_content(self, response: GenerationResponse, candidate: Candidate) -> TextContent: """Create a text content object. @@ -116,39 +150,6 @@ def _create_text_content(self, response: GenerationResponse, candidate: Candidat metadata=response_metadata, ) - # endregion - - # region Streaming - @override - async def get_streaming_text_contents( - self, - prompt: str, - settings: "PromptExecutionSettings", - ) -> AsyncGenerator[list["StreamingTextContent"], Any]: - settings = self.get_prompt_execution_settings_from_settings(settings) - assert isinstance(settings, VertexAITextPromptExecutionSettings) # nosec - - async_generator = self._send_streaming_request(prompt, settings) - - async for text_contents in async_generator: - yield text_contents - - async def _send_streaming_request( - self, prompt: str, settings: VertexAITextPromptExecutionSettings - ) -> AsyncGenerator[list[StreamingTextContent], Any]: - """Send a text generation request to the Vertex AI service.""" - vertexai.init(project=self.service_settings.project_id, location=self.service_settings.region) - model = GenerativeModel(self.service_settings.gemini_model_id) - - response: AsyncIterable[GenerationResponse] = await model.generate_content_async( - contents=prompt, - generation_config=settings.prepare_settings_dict(), - stream=True, - ) - - async for chunk in response: - yield 
[self._create_streaming_text_content(chunk, candidate) for candidate in chunk.candidates] - def _create_streaming_text_content(self, chunk: GenerationResponse, candidate: Candidate) -> StreamingTextContent: """Create a streaming text content object. @@ -170,8 +171,6 @@ def _create_streaming_text_content(self, chunk: GenerationResponse, candidate: C metadata=response_metadata, ) - # endregion - def _get_metadata_from_response(self, response: GenerationResponse) -> dict[str, Any]: """Get metadata from the response. @@ -183,7 +182,10 @@ def _get_metadata_from_response(self, response: GenerationResponse) -> dict[str, """ return { "prompt_feedback": response.prompt_feedback, - "usage": response.usage_metadata, + "usage": CompletionUsage( + prompt_tokens=response.usage_metadata.prompt_token_count, + completion_tokens=response.usage_metadata.candidates_token_count, + ), } def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: @@ -200,10 +202,3 @@ def _get_metadata_from_candidate(self, candidate: Candidate) -> dict[str, Any]: "finish_reason": candidate.finish_reason, "safety_ratings": candidate.safety_ratings, } - - @override - def get_prompt_execution_settings_class( - self, - ) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return VertexAITextPromptExecutionSettings diff --git a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py index 083905e8b0fd..f4f717c7b0e5 100644 --- a/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py +++ b/python/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py @@ -4,7 +4,7 @@ import sys from collections.abc import AsyncGenerator from threading import Thread -from typing import Any, Literal +from typing import Any, ClassVar, Literal if sys.version_info >= (3, 12): from typing import override # pragma: no cover @@ -20,6 +20,10 
@@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions import ServiceInvalidExecutionSettingsError, ServiceResponseException +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_streaming_text_completion, + trace_text_completion, +) logger: logging.Logger = logging.getLogger(__name__) @@ -27,6 +31,8 @@ class HuggingFaceTextCompletion(TextCompletionClientBase): """Hugging Face text completion service.""" + MODEL_PROVIDER_NAME: ClassVar[str] = "huggingface" + task: Literal["summarization", "text-generation", "text2text-generation"] device: str generator: Any @@ -79,20 +85,20 @@ def __init__( generator=generator, ) - async def get_text_contents( + # region Overriding base class methods + + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return HuggingFacePromptExecutionSettings + + @override + @trace_text_completion(MODEL_PROVIDER_NAME) + async def _inner_get_text_contents( self, prompt: str, - settings: PromptExecutionSettings, + settings: "PromptExecutionSettings", ) -> list[TextContent]: - """This is the method that is called from the kernel to get a response from a text-optimized LLM. - - Args: - prompt (str): The prompt to send to the LLM. - settings (HuggingFacePromptExecutionSettings): Settings for the request. - - Returns: - List[TextContent]: A list of TextContent objects representing the response(s) from the LLM. 
- """ if not isinstance(settings, HuggingFacePromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, HuggingFacePromptExecutionSettings) # nosec @@ -101,41 +107,26 @@ async def get_text_contents( results = self.generator(prompt, **settings.prepare_settings_dict()) except Exception as e: raise ServiceResponseException("Hugging Face completion failed") from e + if isinstance(results, list): return [self._create_text_content(results, result) for result in results] return [self._create_text_content(results, results)] - def _create_text_content(self, response: Any, candidate: dict[str, str]) -> TextContent: - return TextContent( - inner_content=response, - ai_model_id=self.ai_model_id, - text=candidate["summary_text" if self.task == "summarization" else "generated_text"], - ) - - async def get_streaming_text_contents( + @override + @trace_streaming_text_completion(MODEL_PROVIDER_NAME) + async def _inner_get_streaming_text_contents( self, prompt: str, - settings: PromptExecutionSettings, + settings: "PromptExecutionSettings", ) -> AsyncGenerator[list[StreamingTextContent], Any]: - """Streams a text completion using a Hugging Face model. - - Note that this method does not support multiple responses. - - Args: - prompt (str): Prompt to complete. - settings (HuggingFacePromptExecutionSettings): Request settings. - - Yields: - List[StreamingTextContent]: List of StreamingTextContent objects. - """ if not isinstance(settings, HuggingFacePromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, HuggingFacePromptExecutionSettings) # nosec if settings.num_return_sequences > 1: raise ServiceInvalidExecutionSettingsError( - "HuggingFace TextIteratorStreamer does not stream multiple responses in a parseable format. 
\ - If you need multiple responses, please use the complete method.", + "HuggingFace TextIteratorStreamer does not stream multiple responses in a parsable format." + " If you need multiple responses, please use the complete method.", ) try: streamer = TextIteratorStreamer(AutoTokenizer.from_pretrained(self.ai_model_id)) @@ -156,7 +147,11 @@ async def get_streaming_text_contents( except Exception as e: raise ServiceResponseException("Hugging Face completion failed") from e - @override - def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: - """Create a request settings object.""" - return HuggingFacePromptExecutionSettings + # endregion + + def _create_text_content(self, response: Any, candidate: dict[str, str]) -> TextContent: + return TextContent( + inner_content=response, + ai_model_id=self.ai_model_id, + text=candidate["summary_text" if self.task == "summarization" else "generated_text"], + ) diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py index ea6087353c7c..ae11806565e3 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/prompt_execution_settings/mistral_ai_prompt_execution_settings.py @@ -1,9 +1,15 @@ # Copyright (c) Microsoft. All rights reserved. 
import logging +import sys from typing import Any, Literal -from pydantic import Field, model_validator +if sys.version_info >= (3, 11): + pass # pragma: no cover +else: + pass # pragma: no cover + +from pydantic import Field from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -28,11 +34,14 @@ class MistralAIChatPromptExecutionSettings(MistralAIPromptExecutionSettings): temperature: float | None = Field(None, ge=0.0, le=2.0) top_p: float | None = Field(None, ge=0.0, le=1.0) random_seed: int | None = None - - @model_validator(mode="after") - def check_function_call_behavior(self) -> "MistralAIChatPromptExecutionSettings": - """Check if the user is requesting function call behavior.""" - if self.function_choice_behavior is not None: - raise NotImplementedError("MistralAI does not support function call behavior.") - - return self + tools: list[dict[str, Any]] | None = Field( + None, + max_length=64, + description="Do not set this manually. It is set by the service based on the function choice configuration.", + ) + tool_choice: str | None = Field( + None, + description="Do not set this manually. It is set by the service based on the function choice configuration.", + ) + + diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_base.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_base.py new file mode 100644 index 000000000000..0e18409f9e08 --- /dev/null +++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_base.py @@ -0,0 +1,16 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from abc import ABC +from typing import ClassVar + +from mistralai.async_client import MistralAsyncClient + +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +class MistralAIBase(KernelBaseModel, ABC): + """Mistral AI service base.""" + + MODEL_PROVIDER_NAME: ClassVar[str] = "mistralai" + + async_client: MistralAsyncClient diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py index ffd6bc2594ad..7d1fb24d303b 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_chat_completion.py @@ -1,8 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. import logging -from collections.abc import AsyncGenerator -from typing import Any +import sys +from collections.abc import AsyncGenerator, Callable +from typing import Any, ClassVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover from mistralai.async_client import MistralAsyncClient from mistralai.models.chat_completion import ( @@ -16,36 +22,41 @@ from pydantic import ValidationError from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration +from semantic_kernel.connectors.ai.function_calling_utils import kernel_function_metadata_to_function_call_format +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceType from semantic_kernel.connectors.ai.mistral_ai.prompt_execution_settings.mistral_ai_prompt_execution_settings import ( MistralAIChatPromptExecutionSettings, ) +from 
semantic_kernel.connectors.ai.mistral_ai.services.mistral_ai_base import MistralAIBase from semantic_kernel.connectors.ai.mistral_ai.settings.mistral_ai_settings import MistralAISettings from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents import ( + ChatMessageContent, + FunctionCallContent, + StreamingChatMessageContent, + StreamingTextContent, + TextContent, +) from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.function_call_content import FunctionCallContent -from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.exceptions.service_exceptions import ( - ServiceInitializationError, - ServiceResponseException, -) +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException from semantic_kernel.utils.experimental_decorator import experimental_class +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) logger: logging.Logger = logging.getLogger(__name__) @experimental_class -class MistralAIChatCompletion(ChatCompletionClientBase): +class MistralAIChatCompletion(MistralAIBase, ChatCompletionClientBase): """Mistral Chat completion class.""" - prompt_tokens: int = 0 - completion_tokens: int = 0 - total_tokens: int = 0 - async_client: MistralAsyncClient + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True def __init__( self, @@ -64,10 +75,10 @@ def __init__( service_id (str | None): Service ID tied to the 
execution settings. api_key (str | None): The optional API key to use. If provided will override, the env vars or .env file value. - async_client (MistralAsyncClient | None) : An existing client to use. + async_client (MistralAsyncClient | None) : An existing client to use. env_file_path (str | None): Use the environment settings file as a fallback - to environment variables. - env_file_encoding (str | None): The encoding of the environment settings file. + to environment variables. + env_file_encoding (str | None): The encoding of the environment settings file. """ try: mistralai_settings = MistralAISettings.create( @@ -78,7 +89,7 @@ def __init__( ) except ValidationError as ex: raise ServiceInitializationError("Failed to create MistralAI settings.", ex) from ex - + if not mistralai_settings.chat_model_id: raise ServiceInitializationError("The MistralAI chat model ID is required.") @@ -93,69 +104,61 @@ def __init__( ai_model_id=ai_model_id or mistralai_settings.chat_model_id, ) - async def get_chat_message_contents( + # region Overriding base class methods + + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> "type[MistralAIChatPromptExecutionSettings]": + """Create a request settings object.""" + return MistralAIChatPromptExecutionSettings + + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + if hasattr(self.async_client, "_endpoint"): + # Best effort to get the endpoint + return self.async_client._endpoint + return None + + @override + @trace_chat_completion(MistralAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( self, chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list["ChatMessageContent"]: - """Executes a chat completion request and returns the result. - - Args: - chat_history (ChatHistory): The chat history to use for the chat completion. 
- settings (PromptExecutionSettings): The settings to use - for the chat completion request. - kwargs (Dict[str, Any]): The optional arguments. - - Returns: - List[ChatMessageContent]: The completion result(s). - """ + ) -> list["ChatMessageContent"]: if not isinstance(settings, MistralAIChatPromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, MistralAIChatPromptExecutionSettings) # nosec - if not settings.ai_model_id: - settings.ai_model_id = self.ai_model_id - + settings.ai_model_id = settings.ai_model_id or self.ai_model_id settings.messages = self._prepare_chat_history_for_request(chat_history) + try: response = await self.async_client.chat(**settings.prepare_settings_dict()) except Exception as ex: raise ServiceResponseException( f"{type(self)} service failed to complete the prompt", ex, - ) from ex - - self.store_usage(response) + ) from ex + response_metadata = self._get_metadata_from_response(response) return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices] - - async def get_streaming_chat_message_contents( - self, - chat_history: ChatHistory, - settings: PromptExecutionSettings, - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Executes a streaming chat completion request and returns the result. - Args: - chat_history (ChatHistory): The chat history to use for the chat completion. - settings (PromptExecutionSettings): The settings to use - for the chat completion request. - kwargs (Dict[str, Any]): The optional arguments. - - Yields: - List[StreamingChatMessageContent]: A stream of - StreamingChatMessageContent when using Azure. 
- """ + @override + @trace_streaming_chat_completion(MistralAIBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: if not isinstance(settings, MistralAIChatPromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, MistralAIChatPromptExecutionSettings) # nosec - if not settings.ai_model_id: - settings.ai_model_id = self.ai_model_id - + settings.ai_model_id = settings.ai_model_id or self.ai_model_id settings.messages = self._prepare_chat_history_for_request(chat_history) + try: response = self.async_client.chat_stream(**settings.prepare_settings_dict()) except Exception as ex: @@ -171,6 +174,8 @@ async def get_streaming_chat_message_contents( self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices ] + # endregion + # region content conversion to SK def _create_chat_message_content( @@ -181,7 +186,7 @@ def _create_chat_message_content( metadata.update(response_metadata) items: list[Any] = self._get_tool_calls_from_chat_choice(choice) - + if choice.message.content: items.append(TextContent(text=choice.message.content)) @@ -220,8 +225,7 @@ def _create_streaming_chat_message_content( ) def _get_metadata_from_response( - self, - response: ChatCompletionResponse | ChatCompletionStreamResponse + self, response: ChatCompletionResponse | ChatCompletionStreamResponse ) -> dict[str, Any]: """Get metadata from a chat response.""" metadata: dict[str, Any] = { @@ -230,28 +234,32 @@ def _get_metadata_from_response( } # Check if usage exists and has a value, then add it to the metadata if hasattr(response, "usage") and response.usage is not None: - metadata["usage"] = response.usage - + metadata["usage"] = ( + CompletionUsage( + prompt_tokens=response.usage.prompt_tokens, + 
completion_tokens=response.usage.completion_tokens, + ), + ) + return metadata def _get_metadata_from_chat_choice( - self, - choice: ChatCompletionResponseChoice | ChatCompletionResponseStreamChoice + self, choice: ChatCompletionResponseChoice | ChatCompletionResponseStreamChoice ) -> dict[str, Any]: """Get metadata from a chat choice.""" return { "logprobs": getattr(choice, "logprobs", None), } - - def _get_tool_calls_from_chat_choice(self, - choice: ChatCompletionResponseChoice | ChatCompletionResponseStreamChoice + + def _get_tool_calls_from_chat_choice( + self, choice: ChatCompletionResponseChoice | ChatCompletionResponseStreamChoice ) -> list[FunctionCallContent]: """Get tool calls from a chat choice.""" - content: ChatMessage | DeltaMessage + content: ChatMessage | DeltaMessage content = choice.message if isinstance(choice, ChatCompletionResponseChoice) else choice.delta if content.tool_calls is None: return [] - + return [ FunctionCallContent( id=tool.id, @@ -264,15 +272,39 @@ def _get_tool_calls_from_chat_choice(self, # endregion - def get_prompt_execution_settings_class(self) -> "type[MistralAIChatPromptExecutionSettings]": - """Create a request settings object.""" - return MistralAIChatPromptExecutionSettings - - def store_usage(self, response): - """Store the usage information from the response.""" - if not isinstance(response, AsyncGenerator): - logger.info(f"MistralAI usage: {response.usage}") - self.prompt_tokens += response.usage.prompt_tokens - self.total_tokens += response.usage.total_tokens - if hasattr(response.usage, "completion_tokens"): - self.completion_tokens += response.usage.completion_tokens + def update_settings_from_function_call_configuration_mistral( + self, + function_choice_configuration: "FunctionCallChoiceConfiguration", + settings: "PromptExecutionSettings", + type: "FunctionChoiceType", + ) -> None: + """Update the settings from a FunctionChoiceConfiguration.""" + if ( + function_choice_configuration.available_functions + and 
hasattr(settings, "tool_choice") + and hasattr(settings, "tools") + ): + settings.tool_choice = type + settings.tools = [ + kernel_function_metadata_to_function_call_format(f) + for f in function_choice_configuration.available_functions + ] + # Function Choice behavior required maps to MistralAI any + if ( + settings.function_choice_behavior + and settings.function_choice_behavior.type_ == FunctionChoiceType.REQUIRED + ): + settings.tool_choice = "any" + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return self.update_settings_from_function_call_configuration_mistral + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_choice"): + settings.tool_choice = None + if hasattr(settings, "tools"): + settings.tools = None diff --git a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py index 24b2905b1587..8bf76e5303b1 100644 --- a/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/mistral_ai/services/mistral_ai_text_embedding.py @@ -6,6 +6,7 @@ from typing import Any, override # pragma: no cover else: from typing_extensions import Any, override # pragma: no cover + import logging from mistralai.async_client import MistralAsyncClient @@ -14,6 +15,7 @@ from pydantic import ValidationError from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase +from semantic_kernel.connectors.ai.mistral_ai.services.mistral_ai_base import MistralAIBase from semantic_kernel.connectors.ai.mistral_ai.settings.mistral_ai_settings import MistralAISettings from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from 
semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceResponseException @@ -23,19 +25,17 @@ @experimental_class -class MistralAITextEmbedding(EmbeddingGeneratorBase): +class MistralAITextEmbedding(MistralAIBase, EmbeddingGeneratorBase): """Mistral AI Inference Text Embedding Service.""" - client: MistralAsyncClient - def __init__( self, ai_model_id: str | None = None, api_key: str | None = None, service_id: str | None = None, + async_client: MistralAsyncClient | None = None, env_file_path: str | None = None, env_file_encoding: str | None = None, - client: MistralAsyncClient | None = None, ) -> None: """Initialize the Mistral AI Text Embedding service. @@ -45,12 +45,12 @@ def __init__( - MISTRALAI_EMBEDDING_MODEL_ID Args: - ai_model_id: (str | None): A string that is used to identify the model such as the model name. - api_key (str | None): The API key for the Mistral AI service deployment. - service_id (str | None): Service ID for the embedding completion service. - env_file_path (str | None): The path to the environment file. - env_file_encoding (str | None): The encoding of the environment file. - client (MistralAsyncClient | None): The Mistral AI client to use. + ai_model_id: (str | None): A string that is used to identify the model such as the model name. + api_key (str | None): The API key for the Mistral AI service deployment. + service_id (str | None): Service ID for the embedding completion service. + async_client (MistralAsyncClient | None): The Mistral AI client to use. + env_file_path (str | None): The path to the environment file. + env_file_encoding (str | None): The encoding of the environment file. Raises: ServiceInitializationError: If an error occurs during initialization. 
@@ -68,15 +68,13 @@ def __init__( if not mistralai_settings.embedding_model_id: raise ServiceInitializationError("The MistralAI embedding model ID is required.") - if not client: - client = MistralAsyncClient( - api_key=mistralai_settings.api_key.get_secret_value() - ) + if not async_client: + async_client = MistralAsyncClient(api_key=mistralai_settings.api_key.get_secret_value()) super().__init__( service_id=service_id or mistralai_settings.embedding_model_id, ai_model_id=ai_model_id or mistralai_settings.embedding_model_id, - client=client, + async_client=async_client, ) @override @@ -98,10 +96,8 @@ async def generate_raw_embeddings( ) -> Any: """Generate embeddings from the Mistral AI service.""" try: - - embedding_response: EmbeddingResponse = await self.client.embeddings( - model=self.ai_model_id, - input=texts + embedding_response: EmbeddingResponse = await self.async_client.embeddings( + model=self.ai_model_id, input=texts ) except Exception as ex: raise ServiceResponseException( diff --git a/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py index 7e365bea3d5c..6ff69be7dc12 100644 --- a/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/ollama/ollama_prompt_execution_settings.py @@ -11,6 +11,9 @@ class OllamaPromptExecutionSettings(PromptExecutionSettings): format: Literal["json"] | None = None options: dict[str, Any] | None = None + # TODO(@taochen): Add individual properties for execution settings and + # convert them to the appropriate types in the options dictionary. 
+ class OllamaTextPromptExecutionSettings(OllamaPromptExecutionSettings): """Settings for Ollama text prompt execution.""" diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_base.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_base.py index ceffb48d9dbf..f03ad0e994d1 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_base.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_base.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. from abc import ABC +from typing import ClassVar from ollama import AsyncClient @@ -14,4 +15,6 @@ class OllamaBase(KernelBaseModel, ABC): client [AsyncClient]: An Ollama client to use for the service. """ + MODEL_PROVIDER_NAME: ClassVar[str] = "ollama" + client: AsyncClient diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py index 1c3ffe3080b7..17a35533e143 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_chat_completion.py @@ -3,28 +3,30 @@ import logging import sys from collections.abc import AsyncGenerator, AsyncIterator, Mapping -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, ClassVar if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: from typing_extensions import override # pragma: no cover -from ollama import AsyncClient, Message +import httpx +from ollama import AsyncClient from pydantic import ValidationError from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import OllamaChatPromptExecutionSettings from semantic_kernel.connectors.ai.ollama.ollama_settings import OllamaSettings from 
semantic_kernel.connectors.ai.ollama.services.ollama_base import OllamaBase -from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase from semantic_kernel.contents import AuthorRole from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent -from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidResponseError +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -32,12 +34,14 @@ logger: logging.Logger = logging.getLogger(__name__) -class OllamaChatCompletion(OllamaBase, TextCompletionClientBase, ChatCompletionClientBase): +class OllamaChatCompletion(OllamaBase, ChatCompletionClientBase): """Initializes a new instance of the OllamaChatCompletion class. Make sure to have the ollama service running either locally or remotely. 
""" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = False + def __init__( self, service_id: str | None = None, @@ -68,30 +72,42 @@ def __init__( except ValidationError as ex: raise ServiceInitializationError("Failed to create Ollama settings.", ex) from ex + if not ollama_settings.model: + raise ServiceInitializationError("Please provide ai_model_id or OLLAMA_MODEL env variable is required") + super().__init__( service_id=service_id or ollama_settings.model, ai_model_id=ollama_settings.model, client=client or AsyncClient(host=ollama_settings.host), ) - async def get_chat_message_contents( + # region Overriding base class methods + + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + """Get the request settings class.""" + return OllamaChatPromptExecutionSettings + + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + if hasattr(self.client, "_client") and isinstance(self.client._client, httpx.AsyncClient): + # Best effort to get the endpoint + return str(self.client._client.base_url) + return None + + @override + @trace_chat_completion(OllamaBase.MODEL_PROVIDER_NAME) + async def _inner_get_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> list[ChatMessageContent]: - """This is the method that is called from the kernel to get a response from a chat-optimized LLM. - - Args: - chat_history (ChatHistory): A chat history that contains a list of chat messages, - that can be rendered into a set of messages, from system, user, assistant and function. - settings (PromptExecutionSettings): Settings for the request. - kwargs (Dict[str, Any]): The optional arguments. 
+ ) -> list["ChatMessageContent"]: + if not isinstance(settings, OllamaChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, OllamaChatPromptExecutionSettings) # nosec - Returns: - List[ChatMessageContent]: A list of ChatMessageContent objects representing the response(s) from the LLM. - """ - settings = self.get_prompt_execution_settings_from_settings(settings) prepared_chat_history = self._prepare_chat_history_for_request(chat_history) response_object = await self.client.chat( @@ -116,26 +132,17 @@ async def get_chat_message_contents( ) ] - async def get_streaming_chat_message_contents( + @override + @trace_streaming_chat_completion(OllamaBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: - """Streams a text completion using an Ollama model. + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + if not isinstance(settings, OllamaChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, OllamaChatPromptExecutionSettings) # nosec - Note that this method does not support multiple responses. - - Args: - chat_history (ChatHistory): A chat history that contains a list of chat messages, - that can be rendered into a set of messages, from system, user, assistant and function. - settings (PromptExecutionSettings): Request settings. - kwargs (Dict[str, Any]): The optional arguments. - - Yields: - List[StreamingChatMessageContent]: Stream of StreamingChatMessageContent objects. 
- """ - settings = self.get_prompt_execution_settings_from_settings(settings) prepared_chat_history = self._prepare_chat_history_for_request(chat_history) response_object = await self.client.chat( @@ -162,87 +169,4 @@ async def get_streaming_chat_message_contents( ) ] - async def get_text_contents( - self, - prompt: str, - settings: "PromptExecutionSettings", - ) -> list[TextContent]: - """This is the method that is called from the kernel to get a response from a text-optimized LLM. - - Args: - prompt (str): A prompt to complete - settings (PromptExecutionSettings): Settings for the request. - - Returns: - List["TextContent"]: The completion result(s). - """ - settings = self.get_prompt_execution_settings_from_settings(settings) - prepared_chat_history = [Message(role=AuthorRole.USER.value, content=prompt)] - - response_object = await self.client.chat( - model=self.ai_model_id, - messages=prepared_chat_history, - stream=False, - **settings.prepare_settings_dict(), - ) - - if not isinstance(response_object, Mapping): - raise ServiceInvalidResponseError( - "Invalid response type from Ollama chat completion. " - f"Expected Mapping but got {type(response_object)}." - ) - - return [ - TextContent( - inner_content=response_object, - ai_model_id=self.ai_model_id, - text=response_object.get("message", {"content": None}).get("content", None), - ) - ] - - async def get_streaming_text_contents( - self, - prompt: str, - settings: "PromptExecutionSettings", - ) -> AsyncGenerator[list[StreamingTextContent], Any]: - """Streams a text completion using an Ollama model. - - Note that this method does not support multiple responses. - - Args: - prompt (str): A chat history that contains the prompt to complete. - settings (PromptExecutionSettings): Request settings. - - Yields: - List["StreamingTextContent"]: The result stream made up of StreamingTextContent objects. 
- """ - settings = self.get_prompt_execution_settings_from_settings(settings) - prepared_chat_history = [Message(role=AuthorRole.USER.value, content=prompt)] - - response_object = await self.client.chat( - model=self.ai_model_id, - messages=prepared_chat_history, - stream=True, - **settings.prepare_settings_dict(), - ) - - if not isinstance(response_object, AsyncIterator): - raise ServiceInvalidResponseError( - "Invalid response type from Ollama chat completion. " - f"Expected AsyncIterator but got {type(response_object)}." - ) - - async for part in response_object: - yield [ - StreamingTextContent( - choice_index=0, - inner_content=part, - ai_model_id=self.ai_model_id, - text=part.get("message", {"content": None}).get("content", None), - ) - ] - - @override - def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return OllamaChatPromptExecutionSettings + # endregion diff --git a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py index 351c4e768fea..c492f6e08a78 100644 --- a/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py +++ b/python/semantic_kernel/connectors/ai/ollama/services/ollama_text_completion.py @@ -10,6 +10,7 @@ else: from typing_extensions import override # pragma: no cover +import httpx from ollama import AsyncClient from pydantic import ValidationError @@ -20,6 +21,10 @@ from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidResponseError +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_streaming_text_completion, + trace_text_completion, +) if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import 
PromptExecutionSettings @@ -69,21 +74,31 @@ def __init__( client=client or AsyncClient(host=ollama_settings.host), ) - async def get_text_contents( + # region Overriding base class methods + + # Override from AIServiceClientBase + @override + def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: + return OllamaTextPromptExecutionSettings + + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + if hasattr(self.client, "_client") and isinstance(self.client._client, httpx.AsyncClient): + # Best effort to get the endpoint + return str(self.client._client.base_url) + return None + + @override + @trace_text_completion(OllamaBase.MODEL_PROVIDER_NAME) + async def _inner_get_text_contents( self, prompt: str, settings: "PromptExecutionSettings", ) -> list[TextContent]: - """This is the method that is called from the kernel to get a response from a text-optimized LLM. - - Args: - prompt (str): The prompt to send to the LLM. - settings (OllamaTextPromptExecutionSettings): Settings for the request. - - Returns: - List[TextContent]: A list of TextContent objects representing the response(s) from the LLM. 
- """ - settings = self.get_prompt_execution_settings_from_settings(settings) + if not isinstance(settings, OllamaTextPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, OllamaTextPromptExecutionSettings) # nosec response_object = await self.client.generate( model=self.ai_model_id, @@ -102,24 +117,16 @@ async def get_text_contents( text = inner_content["response"] return [TextContent(inner_content=inner_content, ai_model_id=self.ai_model_id, text=text)] - async def get_streaming_text_contents( + @override + @trace_streaming_text_completion(OllamaBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_text_contents( self, prompt: str, settings: "PromptExecutionSettings", ) -> AsyncGenerator[list[StreamingTextContent], Any]: - """Streams a text completion using an Ollama model. - - Note that this method does not support multiple responses, - but the result will be a list anyway. - - Args: - prompt (str): Prompt to complete. - settings (OllamaTextPromptExecutionSettings): Request settings. - - Yields: - List[StreamingTextContent]: Completion result. 
- """ - settings = self.get_prompt_execution_settings_from_settings(settings) + if not isinstance(settings, OllamaTextPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, OllamaTextPromptExecutionSettings) # nosec response_object = await self.client.generate( model=self.ai_model_id, @@ -141,7 +148,4 @@ async def get_streaming_text_contents( ) ] - @override - def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: - """Get the request settings class.""" - return OllamaTextPromptExecutionSettings + # endregion diff --git a/python/semantic_kernel/connectors/ai/open_ai/const.py b/python/semantic_kernel/connectors/ai/open_ai/const.py index 5291ee622608..749e7d548e57 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/const.py +++ b/python/semantic_kernel/connectors/ai/open_ai/const.py @@ -2,4 +2,4 @@ from typing import Final -DEFAULT_AZURE_API_VERSION: Final[str] = "2024-02-01" +DEFAULT_AZURE_API_VERSION: Final[str] = "2024-06-01" diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py index 32cfd6f9ae31..19ec573da19b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/azure_chat_prompt_execution_settings.py @@ -42,6 +42,30 @@ class ApiKeyAuthentication(AzureChatRequestBase): key: str | None = None +class SystemAssignedManagedIdentityAuthentication(AzureChatRequestBase): + """System assigned managed identity authentication.""" + + type: Annotated[ + Literal["SystemAssignedManagedIdentity", "system_assigned_managed_identity"], AfterValidator(to_snake) + ] = "system_assigned_managed_identity" + + +class 
UserAssignedManagedIdentityAuthentication(AzureChatRequestBase): + """User assigned managed identity authentication.""" + + type: Annotated[ + Literal["UserAssignedManagedIdentity", "user_assigned_managed_identity"], AfterValidator(to_snake) + ] = "user_assigned_managed_identity" + managed_identity_resource_id: str | None + + +class AccessTokenAuthentication(AzureChatRequestBase): + """Access token authentication.""" + + type: Annotated[Literal["AccessToken", "access_token"], AfterValidator(to_snake)] = "access_token" + access_token: str | None + + class AzureEmbeddingDependency(AzureChatRequestBase): """Azure embedding dependency.""" @@ -98,7 +122,13 @@ class AzureAISearchDataSourceParameters(AzureDataSourceParameters): query_type: Annotated[ Literal["simple", "semantic", "vector", "vectorSimpleHybrid", "vectorSemanticHybrid"], AfterValidator(to_snake) ] = "simple" - authentication: ApiKeyAuthentication | None = None + authentication: ( + ApiKeyAuthentication + | SystemAssignedManagedIdentityAuthentication + | UserAssignedManagedIdentityAuthentication + | AccessTokenAuthentication + | None + ) = None class AzureAISearchDataSource(AzureChatRequestBase): diff --git a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py index f4aa7868f5fc..d65f086eba73 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/prompt_execution_settings/open_ai_prompt_execution_settings.py @@ -9,7 +9,7 @@ else: from typing_extensions import Self # pragma: no cover -from pydantic import Field, field_validator, model_validator +from pydantic import BaseModel, Field, field_validator, model_validator from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior from 
semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -61,7 +61,9 @@ def check_best_of_and_n(self) -> "OpenAITextPromptExecutionSettings": class OpenAIChatPromptExecutionSettings(OpenAIPromptExecutionSettings): """Specific settings for the Chat Completion endpoint.""" - response_format: dict[Literal["type"], Literal["text", "json_object"]] | None = None + response_format: ( + dict[Literal["type"], Literal["text", "json_object"]] | dict[str, Any] | type[BaseModel] | type | None + ) = None function_call: str | None = None functions: list[dict[str, Any]] | None = None messages: list[dict[str, Any]] | None = None @@ -75,6 +77,11 @@ class OpenAIChatPromptExecutionSettings(OpenAIPromptExecutionSettings): None, description="Do not set this manually. It is set by the service based on the function choice configuration.", ) + structured_json_response: bool = Field(False, description="Do not set this manually. It is set by the service.") + stream_options: dict[str, Any] | None = Field( + None, + description="Additional options to pass when streaming is used. 
Do not set this manually.", + ) @field_validator("functions", "function_call", mode="after") @classmethod @@ -86,6 +93,37 @@ def validate_function_call(cls, v: str | list[dict[str, Any]] | None = None): ) return v + @model_validator(mode="before") + def validate_response_format_and_set_flag(cls, values) -> Any: + """Validate the response_format and set structured_json_response accordingly.""" + response_format = values.get("response_format", None) + + if response_format is None: + return values + + if isinstance(response_format, dict): + if response_format.get("type") == "json_object": + return values + if response_format.get("type") == "json_schema": + json_schema = response_format.get("json_schema") + if isinstance(json_schema, dict): + values["structured_json_response"] = True + return values + raise ServiceInvalidExecutionSettingsError( + "If response_format has type 'json_schema', 'json_schema' must be a valid dictionary." + ) + if isinstance(response_format, type): + if issubclass(response_format, BaseModel): + values["structured_json_response"] = True + else: + values["structured_json_response"] = True + else: + raise ServiceInvalidExecutionSettingsError( + "response_format must be a dictionary, a subclass of BaseModel, a Python class/type, or None" + ) + + return values + @model_validator(mode="before") @classmethod def validate_function_calling_behaviors(cls, data) -> Any: diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py index d91fb24ab02b..6b939e7129e1 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py @@ -1,12 +1,19 @@ # Copyright (c) Microsoft. All rights reserved. 
+ import json import logging -from collections.abc import Mapping +import sys +from collections.abc import AsyncGenerator, Mapping from copy import deepcopy from typing import Any, TypeVar from uuid import uuid4 -from openai import AsyncAzureOpenAI +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from openai import AsyncAzureOpenAI, AsyncStream from openai.lib.azure import AsyncAzureADTokenProvider from openai.types.chat.chat_completion import ChatCompletion, Choice from openai.types.chat.chat_completion_chunk import ChatCompletionChunk @@ -16,19 +23,24 @@ from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( AzureChatPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, +) from semantic_kernel.connectors.ai.open_ai.services.azure_config_base import AzureOpenAIConfigBase from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base import OpenAIChatCompletionBase from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIModelTypes from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base import OpenAITextCompletionBase from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from 
semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidResponseError +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import trace_streaming_chat_completion logger: logging.Logger = logging.getLogger(__name__) @@ -48,6 +60,7 @@ def __init__( api_version: str | None = None, ad_token: str | None = None, ad_token_provider: AsyncAzureADTokenProvider | None = None, + token_endpoint: str | None = None, default_headers: Mapping[str, str] | None = None, async_client: AsyncAzureOpenAI | None = None, env_file_path: str | None = None, @@ -69,6 +82,7 @@ def __init__( in the env vars or .env file. ad_token (str | None): The Azure Active Directory token. (Optional) ad_token_provider (AsyncAzureADTokenProvider): The Azure Active Directory token provider. (Optional) + token_endpoint (str | None): The token endpoint to request an Azure token. (Optional) default_headers (Mapping[str, str]): The default headers mapping of string keys to string values for HTTP requests. (Optional) async_client (AsyncAzureOpenAI | None): An existing client to use. 
(Optional) @@ -84,6 +98,7 @@ def __init__( api_version=api_version, env_file_path=env_file_path, env_file_encoding=env_file_encoding, + token_endpoint=token_endpoint, ) except ValidationError as exc: raise ServiceInitializationError(f"Failed to validate settings: {exc}") from exc @@ -91,8 +106,23 @@ def __init__( if not azure_openai_settings.chat_deployment_name: raise ServiceInitializationError("chat_deployment_name is required.") - if not azure_openai_settings.api_key and not ad_token and not ad_token_provider: - raise ServiceInitializationError("Please provide either api_key, ad_token or ad_token_provider") + # If the async_client is None, the api_key is none, the ad_token is none, and the ad_token_provider is none, + # then we will attempt to get the ad_token using the default endpoint specified in the Azure OpenAI settings. + if ( + async_client is None + and azure_openai_settings.api_key is None + and ad_token_provider is None + and ad_token is None + and azure_openai_settings.token_endpoint + ): + ad_token = azure_openai_settings.get_azure_openai_auth_token( + token_endpoint=azure_openai_settings.token_endpoint + ) + + if not async_client and not azure_openai_settings.api_key and not ad_token and not ad_token_provider: + raise ServiceInitializationError( + "Please provide either a custom client, or an api_key, an ad_token or an ad_token_provider" + ) super().__init__( deployment_name=azure_openai_settings.chat_deployment_name, @@ -108,6 +138,42 @@ def __init__( client=async_client, ) + @override + @trace_streaming_chat_completion(OpenAIChatCompletionBase.MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + """Override the base method. 
+ + This is because the latest Azure OpenAI API GA version doesn't support `stream_option` + yet and it will potentially result in errors if the option is included. + This method will be called instead of the base method. + TODO: Remove this method when the `stream_option` is supported by the Azure OpenAI API. + GitHub Issue: https://github.com/microsoft/semantic-kernel/issues/8996 + """ + if not isinstance(settings, OpenAIChatPromptExecutionSettings): + settings = self.get_prompt_execution_settings_from_settings(settings) + assert isinstance(settings, OpenAIChatPromptExecutionSettings) # nosec + + settings.stream = True + settings.messages = self._prepare_chat_history_for_request(chat_history) + settings.ai_model_id = settings.ai_model_id or self.ai_model_id + + response = await self._send_request(request_settings=settings) + if not isinstance(response, AsyncStream): + raise ServiceInvalidResponseError("Expected an AsyncStream[ChatCompletionChunk] response.") + async for chunk in response: + if len(chunk.choices) == 0: + continue + + assert isinstance(chunk, ChatCompletionChunk) # nosec + chunk_metadata = self._get_metadata_from_streaming_chat_response(chunk) + yield [ + self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices + ] + @classmethod def from_dict(cls, settings: dict[str, Any]) -> "AzureChatCompletion": """Initialize an Azure OpenAI service from a dictionary of settings. 
diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py index de911d543836..955aff4e2ced 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py @@ -70,6 +70,24 @@ def __init__( if not azure_openai_settings.text_deployment_name: raise ServiceInitializationError("The Azure Text deployment name is required.") + # If the api_key is none, and the ad_token is none, and the ad_token_provider is none, + # then we will attempt to get the ad_token using the default endpoint specified in the Azure OpenAI settings. + if ( + azure_openai_settings.api_key is None + and ad_token_provider is None + and azure_openai_settings.token_endpoint + and ad_token is None + and async_client is None + ): + ad_token = azure_openai_settings.get_azure_openai_auth_token( + token_endpoint=azure_openai_settings.token_endpoint + ) + + if not azure_openai_settings.api_key and not ad_token and not ad_token_provider and not async_client: + raise ServiceInitializationError( + "Please provide either api_key, ad_token, ad_token_provider, or a custom client." 
+ ) + super().__init__( deployment_name=azure_openai_settings.text_deployment_name, endpoint=azure_openai_settings.endpoint, diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py index 177d2d28815f..06d38300bb1a 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py @@ -72,6 +72,24 @@ def __init__( if not azure_openai_settings.embedding_deployment_name: raise ServiceInitializationError("The Azure OpenAI embedding deployment name is required.") + # If the api_key is none, and the ad_token is none, and the ad_token_provider is none, + # then we will attempt to get the ad_token using the default endpoint specified in the Azure OpenAI settings. + if ( + azure_openai_settings.api_key is None + and ad_token_provider is None + and azure_openai_settings.token_endpoint + and ad_token is None + and async_client is None + ): + ad_token = azure_openai_settings.get_azure_openai_auth_token( + token_endpoint=azure_openai_settings.token_endpoint + ) + + if not azure_openai_settings.api_key and not ad_token and not ad_token_provider and not async_client: + raise ServiceInitializationError( + "Please provide either api_key, ad_token, ad_token_provider, or a custom client" + ) + super().__init__( deployment_name=azure_openai_settings.embedding_deployment_name, endpoint=azure_openai_settings.endpoint, diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py index f1d6099e8e7d..c85c49376140 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py @@ -1,14 +1,9 @@ # Copyright (c) Microsoft. 
All rights reserved. -import asyncio -import logging import sys -from collections.abc import AsyncGenerator -from functools import reduce +from collections.abc import AsyncGenerator, Callable from typing import TYPE_CHECKING, Any, ClassVar, cast -from semantic_kernel.contents.function_result_content import FunctionResultContent - if sys.version_info >= (3, 12): from typing import override # pragma: no cover else: @@ -23,13 +18,16 @@ from typing_extensions import deprecated from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior +from semantic_kernel.connectors.ai.function_call_choice_configuration import FunctionCallChoiceConfiguration from semantic_kernel.connectors.ai.function_calling_utils import update_settings_from_function_call_configuration -from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior, FunctionChoiceType from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.open_ai_handler import OpenAIHandler +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.chat_message_content import ChatMessageContent from semantic_kernel.contents.function_call_content import FunctionCallContent @@ -42,235 +40,126 @@ from semantic_kernel.filters.auto_function_invocation.auto_function_invocation_context import ( AutoFunctionInvocationContext, ) -from semantic_kernel.utils.telemetry.decorators import trace_chat_completion +from 
semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, +) if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.kernel import Kernel -logger: logging.Logger = logging.getLogger(__name__) - - -class InvokeTermination(Exception): - """Exception for termination of function invocation.""" - - pass - class OpenAIChatCompletionBase(OpenAIHandler, ChatCompletionClientBase): """OpenAI Chat completion class.""" MODEL_PROVIDER_NAME: ClassVar[str] = "openai" + SUPPORTS_FUNCTION_CALLING: ClassVar[bool] = True # region Overriding base class methods # most of the methods are overridden from the ChatCompletionClientBase class, otherwise it is mentioned + # Override from AIServiceClientBase @override def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: return OpenAIChatPromptExecutionSettings + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + return str(self.client.base_url) + @override @trace_chat_completion(MODEL_PROVIDER_NAME) - async def get_chat_message_contents( + async def _inner_get_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, ) -> list["ChatMessageContent"]: if not isinstance(settings, OpenAIChatPromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, OpenAIChatPromptExecutionSettings) # nosec - # For backwards compatibility we need to convert the `FunctionCallBehavior` to `FunctionChoiceBehavior` - # if this method is called with a `FunctionCallBehavior` object as part of the settings - if hasattr(settings, "function_call_behavior") and isinstance( - settings.function_call_behavior, FunctionCallBehavior - ): - 
settings.function_choice_behavior = FunctionChoiceBehavior.from_function_call_behavior( - settings.function_call_behavior - ) - - kernel = kwargs.get("kernel", None) - if settings.function_choice_behavior is not None: - if kernel is None: - raise ServiceInvalidExecutionSettingsError("The kernel is required for OpenAI tool calls.") - if settings.number_of_responses is not None and settings.number_of_responses > 1: - raise ServiceInvalidExecutionSettingsError( - "Auto-invocation of tool calls may only be used with a " - "OpenAIChatPromptExecutions.number_of_responses of 1." - ) - - # behavior for non-function calling or for enable, but not auto-invoke. - self._prepare_settings(settings, chat_history, stream_request=False, kernel=kernel) - if settings.function_choice_behavior is None or ( - settings.function_choice_behavior and not settings.function_choice_behavior.auto_invoke_kernel_functions - ): - return await self._send_chat_request(settings) - - # loop for auto-invoke function calls - for request_index in range(settings.function_choice_behavior.maximum_auto_invoke_attempts): - completions = await self._send_chat_request(settings) - # there is only one chat message, this was checked earlier - chat_history.add_message(message=completions[0]) - # get the function call contents from the chat message - function_calls = [item for item in chat_history.messages[-1].items if isinstance(item, FunctionCallContent)] - if (fc_count := len(function_calls)) == 0: - return completions - - logger.info(f"processing {fc_count} tool calls in parallel.") - - # this function either updates the chat history with the function call results - # or returns the context, with terminate set to True - # in which case the loop will break and the function calls are returned. 
- results = await asyncio.gather( - *[ - self._process_function_call( - function_call=function_call, - chat_history=chat_history, - kernel=kernel, - arguments=kwargs.get("arguments", None), - function_call_count=fc_count, - request_index=request_index, - function_call_behavior=settings.function_choice_behavior, - ) - for function_call in function_calls - ], - ) - - if any(result.terminate for result in results if result is not None): - return self._create_filter_early_terminate_chat_message_content(chat_history.messages[-len(results) :]) + settings.stream = False + settings.messages = self._prepare_chat_history_for_request(chat_history) + settings.ai_model_id = settings.ai_model_id or self.ai_model_id - self._update_settings(settings, chat_history, kernel=kernel) - else: - # do a final call, without function calling when the max has been reached. - settings.function_choice_behavior.auto_invoke_kernel_functions = False - return await self._send_chat_request(settings) + response = await self._send_request(request_settings=settings) + assert isinstance(response, ChatCompletion) # nosec + response_metadata = self._get_metadata_from_chat_response(response) + return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices] @override - async def get_streaming_chat_message_contents( + @trace_streaming_chat_completion(MODEL_PROVIDER_NAME) + async def _inner_get_streaming_chat_message_contents( self, - chat_history: ChatHistory, + chat_history: "ChatHistory", settings: "PromptExecutionSettings", - **kwargs: Any, - ) -> AsyncGenerator[list[StreamingChatMessageContent], Any]: + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: if not isinstance(settings, OpenAIChatPromptExecutionSettings): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, OpenAIChatPromptExecutionSettings) # nosec - # For backwards compatibility we need to convert the `FunctionCallBehavior` to 
`FunctionChoiceBehavior` - # if this method is called with a `FunctionCallBehavior` object as part of the settings - if hasattr(settings, "function_call_behavior") and isinstance( - settings.function_call_behavior, FunctionCallBehavior - ): - settings.function_choice_behavior = FunctionChoiceBehavior.from_function_call_behavior( - settings.function_call_behavior - ) - - kernel = kwargs.get("kernel", None) - if settings.function_choice_behavior is not None: - if kernel is None: - raise ServiceInvalidExecutionSettingsError("The kernel is required for OpenAI tool calls.") - if settings.number_of_responses is not None and settings.number_of_responses > 1: - raise ServiceInvalidExecutionSettingsError( - "Auto-invocation of tool calls may only be used with a " - "OpenAIChatPromptExecutions.number_of_responses of 1." - ) - - # Prepare settings for streaming requests - self._prepare_settings(settings, chat_history, stream_request=True, kernel=kernel) - - request_attempts = ( - settings.function_choice_behavior.maximum_auto_invoke_attempts - if (settings.function_choice_behavior and settings.function_choice_behavior.auto_invoke_kernel_functions) - else 1 - ) - # hold the messages, if there are more than one response, it will not be used, so we flatten - for request_index in range(request_attempts): - all_messages: list[StreamingChatMessageContent] = [] - function_call_returned = False - async for messages in self._send_chat_stream_request(settings): - for msg in messages: - if msg is not None: - all_messages.append(msg) - if any(isinstance(item, FunctionCallContent) for item in msg.items): - function_call_returned = True - yield messages - - if ( - settings.function_choice_behavior is None - or ( - settings.function_choice_behavior - and not settings.function_choice_behavior.auto_invoke_kernel_functions - ) - or not function_call_returned - ): - # no need to process function calls - # note that we don't check the FinishReason and instead check whether there are any tool 
calls, - # as the service may return a FinishReason of "stop" even if there are tool calls to be made, - # in particular if a required tool is specified. - return - - # there is one response stream in the messages, combining now to create the full completion - # depending on the prompt, the message may contain both function call content and others - full_completion: StreamingChatMessageContent = reduce(lambda x, y: x + y, all_messages) - function_calls = [item for item in full_completion.items if isinstance(item, FunctionCallContent)] - chat_history.add_message(message=full_completion) - - fc_count = len(function_calls) - logger.info(f"processing {fc_count} tool calls in parallel.") - - # this function either updates the chat history with the function call results - # or returns the context, with terminate set to True - # in which case the loop will break and the function calls are returned. - # Exceptions are not caught, that is up to the developer, can be done with a filter - results = await asyncio.gather( - *[ - self._process_function_call( - function_call=function_call, - chat_history=chat_history, - kernel=kernel, - arguments=kwargs.get("arguments", None), - function_call_count=fc_count, - request_index=request_index, - function_call_behavior=settings.function_choice_behavior, - ) - for function_call in function_calls - ], - ) - if any(result.terminate for result in results if result is not None): - yield self._create_filter_early_terminate_chat_message_content(chat_history.messages[-len(results) :]) # type: ignore - break - - self._update_settings(settings, chat_history, kernel=kernel) - - # endregion - # region internal handlers - - async def _send_chat_request(self, settings: OpenAIChatPromptExecutionSettings) -> list["ChatMessageContent"]: - """Send the chat request.""" - response = await self._send_request(request_settings=settings) - assert isinstance(response, ChatCompletion) # nosec - response_metadata = self._get_metadata_from_chat_response(response) 
- return [self._create_chat_message_content(response, choice, response_metadata) for choice in response.choices] + settings.stream = True + settings.stream_options = {"include_usage": True} + settings.messages = self._prepare_chat_history_for_request(chat_history) + settings.ai_model_id = settings.ai_model_id or self.ai_model_id - async def _send_chat_stream_request( - self, settings: OpenAIChatPromptExecutionSettings - ) -> AsyncGenerator[list["StreamingChatMessageContent"], None]: - """Send the chat stream request.""" response = await self._send_request(request_settings=settings) if not isinstance(response, AsyncStream): raise ServiceInvalidResponseError("Expected an AsyncStream[ChatCompletionChunk] response.") async for chunk in response: - if len(chunk.choices) == 0: + if len(chunk.choices) == 0 and chunk.usage is None: continue + assert isinstance(chunk, ChatCompletionChunk) # nosec chunk_metadata = self._get_metadata_from_streaming_chat_response(chunk) - yield [ - self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) for choice in chunk.choices - ] + if chunk.usage is not None: + # Usage is contained in the last chunk where the choices are empty + # We are duplicating the usage metadata to all the choices in the response + yield [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + content="", + choice_index=i, + inner_content=chunk, + ai_model_id=settings.ai_model_id, + metadata=chunk_metadata, + ) + for i in range(settings.number_of_responses or 1) + ] + else: + yield [ + self._create_streaming_chat_message_content(chunk, choice, chunk_metadata) + for choice in chunk.choices + ] + + @override + def _verify_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if not isinstance(settings, OpenAIChatPromptExecutionSettings): + raise ServiceInvalidExecutionSettingsError("The settings must be an OpenAIChatPromptExecutionSettings.") + if settings.number_of_responses is not None and 
settings.number_of_responses > 1: + raise ServiceInvalidExecutionSettingsError( + "Auto-invocation of tool calls may only be used with a " + "OpenAIChatPromptExecutions.number_of_responses of 1." + ) + + @override + def _update_function_choice_settings_callback( + self, + ) -> Callable[[FunctionCallChoiceConfiguration, "PromptExecutionSettings", FunctionChoiceType], None]: + return update_settings_from_function_call_configuration + + @override + def _reset_function_choice_settings(self, settings: "PromptExecutionSettings") -> None: + if hasattr(settings, "tool_choice"): + settings.tool_choice = None + if hasattr(settings, "tools"): + settings.tools = None # endregion + # region content creation def _create_chat_message_content( @@ -284,6 +173,8 @@ def _create_chat_message_content( items.extend(self._get_function_call_from_chat_choice(choice)) if choice.message.content: items.append(TextContent(text=choice.message.content)) + elif hasattr(choice.message, "refusal") and choice.message.refusal: + items.append(TextContent(text=choice.message.refusal)) return ChatMessageContent( inner_content=response, @@ -318,32 +209,13 @@ def _create_streaming_chat_message_content( items=items, ) - def _create_filter_early_terminate_chat_message_content( - self, - messages: list[ChatMessageContent], - ) -> list[ChatMessageContent]: - """Add an early termination message to the chat messages. - - This method combines the FunctionResultContent items from separate ChatMessageContent messages, - and is used in the event that the `context.terminate = True` condition is met. 
- """ - items: list[Any] = [] - for message in messages: - items.extend([item for item in message.items if isinstance(item, FunctionResultContent)]) - return [ - ChatMessageContent( - role=AuthorRole.TOOL, - items=items, - ) - ] - def _get_metadata_from_chat_response(self, response: ChatCompletion) -> dict[str, Any]: """Get metadata from a chat response.""" return { "id": response.id, "created": response.created, "system_fingerprint": response.system_fingerprint, - "usage": response.usage if hasattr(response, "usage") else None, + "usage": CompletionUsage.from_openai(response.usage) if response.usage is not None else None, } def _get_metadata_from_streaming_chat_response(self, response: ChatCompletionChunk) -> dict[str, Any]: @@ -352,6 +224,7 @@ def _get_metadata_from_streaming_chat_response(self, response: ChatCompletionChu "id": response.id, "created": response.created, "system_fingerprint": response.system_fingerprint, + "usage": CompletionUsage.from_openai(response.usage) if response.usage is not None else None, } def _get_metadata_from_chat_choice(self, choice: Choice | ChunkChoice) -> dict[str, Any]: @@ -391,39 +264,8 @@ def _get_function_call_from_chat_choice(self, choice: Choice | ChunkChoice) -> l return [] # endregion - # region request preparation - - def _prepare_settings( - self, - settings: OpenAIChatPromptExecutionSettings, - chat_history: ChatHistory, - stream_request: bool = False, - kernel: "Kernel | None" = None, - ) -> None: - """Prepare the prompt execution settings for the chat request.""" - settings.stream = stream_request - if not settings.ai_model_id: - settings.ai_model_id = self.ai_model_id - self._update_settings(settings=settings, chat_history=chat_history, kernel=kernel) - - def _update_settings( - self, - settings: OpenAIChatPromptExecutionSettings, - chat_history: ChatHistory, - kernel: "Kernel | None" = None, - ) -> None: - """Update the settings with the chat history.""" - settings.messages = 
self._prepare_chat_history_for_request(chat_history) - if settings.function_choice_behavior and kernel: - settings.function_choice_behavior.configure( - kernel=kernel, - update_settings_callback=update_settings_from_function_call_configuration, - settings=settings, - ) - # endregion # region function calling - @deprecated("Use `invoke_function_call` from the kernel instead with `FunctionChoiceBehavior`.") async def _process_function_call( self, diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py index c65e0bc01989..0af2cbd44c75 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_handler.py @@ -5,17 +5,22 @@ from typing import Any from openai import AsyncOpenAI, AsyncStream, BadRequestError +from openai.lib._parsing._completions import type_to_response_format_param from openai.types import Completion, CreateEmbeddingResponse from openai.types.chat import ChatCompletion, ChatCompletionChunk +from pydantic import BaseModel from semantic_kernel.connectors.ai.open_ai.exceptions.content_filter_ai_exception import ContentFilterAIException from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIChatPromptExecutionSettings, OpenAIEmbeddingPromptExecutionSettings, OpenAIPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.open_ai_model_types import OpenAIModelTypes +from semantic_kernel.connectors.utils.structured_output_schema import generate_structured_output_response_format_schema from semantic_kernel.exceptions import ServiceResponseException from semantic_kernel.kernel_pydantic import KernelBaseModel +from semantic_kernel.schema.kernel_json_schema_builder import KernelJsonSchemaBuilder logger: logging.Logger = logging.getLogger(__name__) @@ -35,10 +40,13 @@ async def 
_send_request( ) -> ChatCompletion | Completion | AsyncStream[ChatCompletionChunk] | AsyncStream[Completion]: """Execute the appropriate call to OpenAI models.""" try: + settings = request_settings.prepare_settings_dict() if self.ai_model_type == OpenAIModelTypes.CHAT: - response = await self.client.chat.completions.create(**request_settings.prepare_settings_dict()) + assert isinstance(request_settings, OpenAIChatPromptExecutionSettings) # nosec + self._handle_structured_output(request_settings, settings) + response = await self.client.chat.completions.create(**settings) else: - response = await self.client.completions.create(**request_settings.prepare_settings_dict()) + response = await self.client.completions.create(**settings) self.store_usage(response) return response except BadRequestError as ex: @@ -68,6 +76,25 @@ async def _send_embedding_request(self, settings: OpenAIEmbeddingPromptExecution ex, ) from ex + def _handle_structured_output( + self, request_settings: OpenAIChatPromptExecutionSettings, settings: dict[str, Any] + ) -> None: + response_format = getattr(request_settings, "response_format", None) + if getattr(request_settings, "structured_json_response", False) and response_format: + # Case 1: response_format is a type and subclass of BaseModel + if isinstance(response_format, type) and issubclass(response_format, BaseModel): + settings["response_format"] = type_to_response_format_param(response_format) + # Case 2: response_format is a type but not a subclass of BaseModel + elif isinstance(response_format, type): + generated_schema = KernelJsonSchemaBuilder.build(parameter_type=response_format, structured_output=True) + assert generated_schema is not None # nosec + settings["response_format"] = generate_structured_output_response_format_schema( + name=response_format.__name__, schema=generated_schema + ) + # Case 3: response_format is a dictionary, pass it without modification + elif isinstance(response_format, dict): + settings["response_format"] = 
response_format + def store_usage( self, response: ChatCompletion diff --git a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py index fbcb90767e46..3dd43339837b 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py +++ b/python/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion_base.py @@ -18,6 +18,7 @@ from openai.types.chat.chat_completion_chunk import ChatCompletionChunk from openai.types.chat.chat_completion_chunk import Choice as ChatCompletionChunkChoice +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, OpenAITextPromptExecutionSettings, @@ -26,7 +27,10 @@ from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.utils.telemetry.decorators import trace_text_completion +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_streaming_text_completion, + trace_text_completion, +) if TYPE_CHECKING: from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings @@ -39,13 +43,21 @@ class OpenAITextCompletionBase(OpenAIHandler, TextCompletionClientBase): MODEL_PROVIDER_NAME: ClassVar[str] = "openai" + # region Overriding base class methods + + # Override from AIServiceClientBase @override def get_prompt_execution_settings_class(self) -> type["PromptExecutionSettings"]: return OpenAITextPromptExecutionSettings + # Override from AIServiceClientBase + @override + def service_url(self) -> str | None: + return str(self.client.base_url) + @override 
@trace_text_completion(MODEL_PROVIDER_NAME) - async def get_text_contents( + async def _inner_get_text_contents( self, prompt: str, settings: "PromptExecutionSettings", @@ -53,19 +65,23 @@ async def get_text_contents( if not isinstance(settings, (OpenAITextPromptExecutionSettings, OpenAIChatPromptExecutionSettings)): settings = self.get_prompt_execution_settings_from_settings(settings) assert isinstance(settings, (OpenAITextPromptExecutionSettings, OpenAIChatPromptExecutionSettings)) # nosec + if isinstance(settings, OpenAITextPromptExecutionSettings): settings.prompt = prompt else: settings.messages = [{"role": "user", "content": prompt}] - if settings.ai_model_id is None: - settings.ai_model_id = self.ai_model_id + + settings.ai_model_id = settings.ai_model_id or self.ai_model_id + response = await self._send_request(request_settings=settings) assert isinstance(response, (TextCompletion, ChatCompletion)) # nosec + metadata = self._get_metadata_from_text_response(response) return [self._create_text_content(response, choice, metadata) for choice in response.choices] @override - async def get_streaming_text_contents( + @trace_streaming_text_completion(MODEL_PROVIDER_NAME) + async def _inner_get_streaming_text_contents( self, prompt: str, settings: "PromptExecutionSettings", @@ -81,10 +97,13 @@ async def get_streaming_text_contents( settings.messages = [{"role": "user", "content": prompt}] else: settings.messages.append({"role": "user", "content": prompt}) - settings.ai_model_id = self.ai_model_id + + settings.ai_model_id = settings.ai_model_id or self.ai_model_id settings.stream = True + response = await self._send_request(request_settings=settings) assert isinstance(response, AsyncStream) # nosec + async for chunk in response: if len(chunk.choices) == 0: continue @@ -92,6 +111,8 @@ async def get_streaming_text_contents( chunk_metadata = self._get_metadata_from_text_response(chunk) yield [self._create_streaming_text_content(chunk, choice, chunk_metadata) for choice 
in chunk.choices] + # endregion + def _create_text_content( self, response: TextCompletion | ChatCompletion, @@ -136,8 +157,8 @@ def _get_metadata_from_text_response( "created": response.created, "system_fingerprint": response.system_fingerprint, } - if hasattr(response, "usage"): - ret["usage"] = response.usage + if response.usage is not None: + ret["usage"] = CompletionUsage.from_openai(response.usage) return ret def _get_metadata_from_text_choice( diff --git a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py index a5f26db8fb8d..c201643a6bf2 100644 --- a/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py +++ b/python/semantic_kernel/connectors/ai/open_ai/settings/azure_open_ai_settings.py @@ -1,12 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. - from typing import ClassVar from pydantic import SecretStr from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION +from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings +from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token class AzureOpenAISettings(KernelBaseSettings): @@ -59,7 +60,9 @@ class AzureOpenAISettings(KernelBaseSettings): (Env var AZURE_OPENAI_ENDPOINT) - api_version: str | None - The API version to use. The default value is "2024-02-01". (Env var AZURE_OPENAI_API_VERSION) - - env_file_path: str | None - if provided, the .env settings are read from this file path location + - token_endpoint: str - The token endpoint to use to retrieve the authentication token. + The default value is "https://cognitiveservices.azure.com". 
+ (Env var AZURE_OPENAI_TOKEN_ENDPOINT) """ env_prefix: ClassVar[str] = "AZURE_OPENAI_" @@ -72,3 +75,26 @@ class AzureOpenAISettings(KernelBaseSettings): base_url: HttpsUrl | None = None api_key: SecretStr | None = None api_version: str = DEFAULT_AZURE_API_VERSION + token_endpoint: str = "https://cognitiveservices.azure.com" + + def get_azure_openai_auth_token(self, token_endpoint: str | None = None) -> str | None: + """Retrieve a Microsoft Entra Auth Token for a given token endpoint for the use with Azure OpenAI. + + The required role for the token is `Cognitive Services OpenAI Contributor`. + The token endpoint may be specified as an environment variable, via the .env + file or as an argument. If the token endpoint is not provided, the default is None. + The `token_endpoint` argument takes precedence over the `token_endpoint` attribute. + + Args: + token_endpoint: The token endpoint to use. Defaults to `https://cognitiveservices.azure.com`. + + Returns: + The Azure token or None if the token could not be retrieved. + + Raises: + ServiceInitializationError: If the token endpoint is not provided. 
+ """ + endpoint_to_use = token_endpoint or self.token_endpoint + if endpoint_to_use is None: + raise ServiceInitializationError("Please provide a token endpoint to retrieve the authentication token.") + return get_entra_auth_token(endpoint_to_use) diff --git a/python/semantic_kernel/connectors/ai/prompt_execution_settings.py b/python/semantic_kernel/connectors/ai/prompt_execution_settings.py index b683a58e17fa..4c3abc8f5419 100644 --- a/python/semantic_kernel/connectors/ai/prompt_execution_settings.py +++ b/python/semantic_kernel/connectors/ai/prompt_execution_settings.py @@ -79,6 +79,7 @@ def prepare_settings_dict(self, **kwargs) -> dict[str, Any]: exclude={ "service_id", "extension_data", + "structured_json_response", }, exclude_none=True, by_alias=True, diff --git a/python/semantic_kernel/connectors/ai/text_completion_client_base.py b/python/semantic_kernel/connectors/ai/text_completion_client_base.py index c03d30a6d2e3..76b5d283ce90 100644 --- a/python/semantic_kernel/connectors/ai/text_completion_client_base.py +++ b/python/semantic_kernel/connectors/ai/text_completion_client_base.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. -from abc import ABC, abstractmethod +import copy +from abc import ABC from collections.abc import AsyncGenerator from typing import TYPE_CHECKING, Any @@ -14,7 +15,47 @@ class TextCompletionClientBase(AIServiceClientBase, ABC): """Base class for text completion AI services.""" - @abstractmethod + # region Internal methods to be implemented by the derived classes + + async def _inner_get_text_contents( + self, + prompt: str, + settings: "PromptExecutionSettings", + ) -> list["TextContent"]: + """Send a text completion request to the AI service. + + Args: + prompt (str): The prompt to send to the LLM. + settings (PromptExecutionSettings): Settings for the request. + + Returns: + list[TextContent]: A string or list of strings representing the response(s) from the LLM. 
+ """ + raise NotImplementedError("The _inner_get_text_contents method is not implemented.") + + async def _inner_get_streaming_text_contents( + self, + prompt: str, + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingTextContent"], Any]: + """Send a streaming text request to the AI service. + + Args: + prompt (str): The prompt to send to the LLM. + settings (PromptExecutionSettings): Settings for the request. + + Yields: + list[StreamingTextContent]: A stream representing the response(s) from the LLM. + """ + # Below is needed for mypy: https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators + raise NotImplementedError("The _inner_get_streaming_text_contents method is not implemented.") + if False: + yield + + # endregion + + # region Public methods + async def get_text_contents( self, prompt: str, @@ -29,6 +70,10 @@ async def get_text_contents( Returns: list[TextContent]: A string or list of strings representing the response(s) from the LLM. """ + # Create a copy of the settings to avoid modifying the original settings + settings = copy.deepcopy(settings) + + return await self._inner_get_text_contents(prompt, settings) async def get_text_content(self, prompt: str, settings: "PromptExecutionSettings") -> "TextContent | None": """This is the method that is called from the kernel to get a response from a text-optimized LLM. @@ -46,8 +91,7 @@ async def get_text_content(self, prompt: str, settings: "PromptExecutionSettings # this should not happen, should error out before returning an empty list return None # pragma: no cover - @abstractmethod - def get_streaming_text_contents( + async def get_streaming_text_contents( self, prompt: str, settings: "PromptExecutionSettings", @@ -61,7 +105,11 @@ def get_streaming_text_contents( Yields: list[StreamingTextContent]: A stream representing the response(s) from the LLM. """ - ... 
+ # Create a copy of the settings to avoid modifying the original settings + settings = copy.deepcopy(settings) + + async for contents in self._inner_get_streaming_text_contents(prompt, settings): + yield contents async def get_streaming_text_content( self, prompt: str, settings: "PromptExecutionSettings" @@ -81,3 +129,5 @@ async def get_streaming_text_content( else: # this should not happen, should error out before returning an empty list yield None # pragma: no cover + + # endregion diff --git a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py index b0d8734449df..f6c1853b2cce 100644 --- a/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py +++ b/python/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py @@ -181,7 +181,7 @@ async def upsert_batch(self, collection_name: str, records: list[MemoryRecord]) # upsert is checking collection existence return [await self.upsert(collection_name, record) for record in records] - async def get(self, collection_name: str, key: str, with_embedding: bool) -> MemoryRecord: + async def get(self, collection_name: str, key: str, with_embedding: bool = False) -> MemoryRecord: """Gets a record. Args: @@ -200,7 +200,12 @@ async def get(self, collection_name: str, key: str, with_embedding: bool) -> Mem f"Record with key '{key}' does not exist in collection '{collection_name}'" ) from exc - async def get_batch(self, collection_name: str, keys: list[str], with_embeddings: bool) -> list[MemoryRecord]: + async def get_batch( + self, + collection_name: str, + keys: list[str], + with_embeddings: bool = False + ) -> list[MemoryRecord]: """Gets a batch of records. 
Args: diff --git a/python/semantic_kernel/connectors/memory/postgres/__init__.py b/python/semantic_kernel/connectors/memory/postgres/__init__.py index 7a0e7301d8e8..34cbc1f6414b 100644 --- a/python/semantic_kernel/connectors/memory/postgres/__init__.py +++ b/python/semantic_kernel/connectors/memory/postgres/__init__.py @@ -1,8 +1,10 @@ # Copyright (c) Microsoft. All rights reserved. +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection from semantic_kernel.connectors.memory.postgres.postgres_memory_store import ( PostgresMemoryStore, ) from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings +from semantic_kernel.connectors.memory.postgres.postgres_store import PostgresStore -__all__ = ["PostgresMemoryStore", "PostgresSettings"] +__all__ = ["PostgresCollection", "PostgresMemoryStore", "PostgresSettings", "PostgresStore"] diff --git a/python/semantic_kernel/connectors/memory/postgres/constants.py b/python/semantic_kernel/connectors/memory/postgres/constants.py new file mode 100644 index 000000000000..6c08ef2052e6 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/postgres/constants.py @@ -0,0 +1,14 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +DEFAULT_SCHEMA = "public" + +# Limitation based on pgvector documentation https://github.com/pgvector/pgvector#what-if-i-want-to-index-vectors-with-more-than-2000-dimensions +MAX_DIMENSIONALITY = 2000 + +# Environment Variables +PGHOST_ENV_VAR = "PGHOST" +PGPORT_ENV_VAR = "PGPORT" +PGDATABASE_ENV_VAR = "PGDATABASE" +PGUSER_ENV_VAR = "PGUSER" +PGPASSWORD_ENV_VAR = "PGPASSWORD" +PGSSL_MODE_ENV_VAR = "PGSSL_MODE" diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py new file mode 100644 index 000000000000..9bfb839d4ad8 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_collection.py @@ -0,0 +1,410 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import sys +from collections.abc import Sequence +from typing import Any, ClassVar, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from psycopg import sql +from psycopg.errors import DatabaseError +from psycopg_pool import AsyncConnectionPool +from pydantic import PrivateAttr + +from semantic_kernel.connectors.memory.postgres.constants import DEFAULT_SCHEMA, MAX_DIMENSIONALITY +from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings +from semantic_kernel.connectors.memory.postgres.utils import ( + convert_dict_to_row, + convert_row_to_dict, + get_vector_index_ops_str, + python_type_to_postgres, +) +from semantic_kernel.data.const import IndexKind +from semantic_kernel.data.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.data.vector_store_record_fields import VectorStoreRecordKeyField, VectorStoreRecordVectorField +from semantic_kernel.exceptions.memory_connector_exceptions import ( + 
MemoryConnectorException, + VectorStoreModelValidationError, +) +from semantic_kernel.kernel_types import OneOrMany +from semantic_kernel.utils.experimental_decorator import experimental_class + +TKey = TypeVar("TKey", str, int) +TModel = TypeVar("TModel") + +logger: logging.Logger = logging.getLogger(__name__) + + +@experimental_class +class PostgresCollection(VectorStoreRecordCollection[TKey, TModel]): + """PostgreSQL collection implementation.""" + + connection_pool: AsyncConnectionPool | None = None + db_schema: str = DEFAULT_SCHEMA + supported_key_types: ClassVar[list[str] | None] = ["str", "int"] + supported_vector_types: ClassVar[list[str] | None] = ["float"] + + _handle_pool_close: bool = PrivateAttr(False) + """Whether the collection should handle closing the pool. True if the pool was created by the collection.""" + + _settings: PostgresSettings = PrivateAttr() + """Postgres settings""" + + def __init__( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + connection_pool: AsyncConnectionPool | None = None, + db_schema: str = DEFAULT_SCHEMA, + env_file_path: str | None = None, + env_file_encoding: str | None = None, + settings: PostgresSettings | None = None, + ): + """Initialize the collection. + + Args: + collection_name: The name of the collection, which corresponds to the table name. + data_model_type: The type of the data model. + data_model_definition: The data model definition. + connection_pool: The connection pool. + db_schema: The database schema. + env_file_path: Use the environment settings file as a fallback to environment variables. + env_file_encoding: The encoding of the environment settings file. + settings: The settings for creating a new connection pool. If not provided, the settings will be created + from the environment. 
+ """ + super().__init__( + collection_name=collection_name, + data_model_type=data_model_type, + data_model_definition=data_model_definition, + connection_pool=connection_pool, + db_schema=db_schema, + ) + + self._settings = settings or PostgresSettings.create( + env_file_path=env_file_path, env_file_encoding=env_file_encoding + ) + + @override + async def __aenter__(self) -> "PostgresCollection": + # If the connection pool was not provided, create a new one. + if not self.connection_pool: + self.connection_pool = await self._settings.create_connection_pool() + self._handle_pool_close = True + return self + + @override + async def __aexit__(self, *args): + if self._handle_pool_close and self.connection_pool: + await self.connection_pool.close() + # If the pool was created by the collection, set it to None to enable reusing the collection. + if self._settings: + self.connection_pool = None + + @override + def _validate_data_model(self) -> None: + """Validate the data model.""" + + def _check_dimensionality(dimension_num): + if dimension_num > MAX_DIMENSIONALITY: + raise VectorStoreModelValidationError( + f"Dimensionality of {dimension_num} exceeds the maximum allowed value of {MAX_DIMENSIONALITY}." + ) + if dimension_num <= 0: + raise VectorStoreModelValidationError("Dimensionality must be a positive integer. ") + + for field in self.data_model_definition.vector_fields: + if field.dimensions: + _check_dimensionality(field.dimensions) + + super()._validate_data_model() + + @override + async def _inner_upsert( + self, + records: Sequence[dict[str, Any]], + **kwargs: Any, + ) -> Sequence[TKey]: + """Upsert records into the database. + + Args: + records: The records, the format is specific to the store. + **kwargs: Additional arguments, to be passed to the store. + + Returns: + The keys of the upserted records. 
+ """ + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + keys = [] + try: + async with ( + self.connection_pool.connection() as conn, + conn.transaction(), + conn.cursor() as cur, + ): + # Split the records into batches + max_rows_per_transaction = self._settings.max_rows_per_transaction + for i in range(0, len(records), max_rows_per_transaction): + record_batch = records[i : i + max_rows_per_transaction] + + fields = list(self.data_model_definition.fields.items()) + + row_values = [convert_dict_to_row(record, fields) for record in record_batch] + + # Execute the INSERT statement for each batch + await cur.executemany( + sql.SQL("INSERT INTO {}.{} ({}) VALUES ({}) ON CONFLICT ({}) DO UPDATE SET {}").format( + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.SQL(", ").join(sql.Identifier(field.name) for _, field in fields), + sql.SQL(", ").join(sql.Placeholder() * len(fields)), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join( + sql.SQL("{field} = EXCLUDED.{field}").format(field=sql.Identifier(field.name)) + for _, field in fields + if field.name != self.data_model_definition.key_field.name + ), + ), + row_values, + ) + keys.extend(record.get(self.data_model_definition.key_field.name) for record in record_batch) + + except DatabaseError as error: + # Rollback happens automatically if an exception occurs within the transaction block + raise MemoryConnectorException(f"Error upserting records: {error}") from error + + return keys + + @override + async def _inner_get(self, keys: Sequence[TKey], **kwargs: Any) -> OneOrMany[dict[str, Any]] | None: + """Get records from the database. + + Args: + keys: The keys to get. + **kwargs: Additional arguments. + + Returns: + The records from the store, not deserialized. 
+ """ + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + fields = [(field.name, field) for field in self.data_model_definition.fields.values()] + try: + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + sql.SQL("SELECT {} FROM {}.{} WHERE {} IN ({})").format( + sql.SQL(", ").join(sql.Identifier(name) for (name, _) in fields), + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join(sql.Literal(key) for key in keys), + ) + ) + rows = await cur.fetchall() + if not rows: + return None + return [convert_row_to_dict(row, fields) for row in rows] + + except DatabaseError as error: + raise MemoryConnectorException(f"Error getting records: {error}") from error + + @override + async def _inner_delete(self, keys: Sequence[TKey], **kwargs: Any) -> None: + """Delete the records with the given keys. + + Args: + keys: The keys. + **kwargs: Additional arguments. 
+ """ + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + try: + async with ( + self.connection_pool.connection() as conn, + conn.transaction(), + conn.cursor() as cur, + ): + # Split the keys into batches + max_rows_per_transaction = self._settings.max_rows_per_transaction + for i in range(0, len(keys), max_rows_per_transaction): + key_batch = keys[i : i + max_rows_per_transaction] + + # Execute the DELETE statement for each batch + await cur.execute( + sql.SQL("DELETE FROM {}.{} WHERE {} IN ({})").format( + sql.Identifier(self.db_schema), + sql.Identifier(self.collection_name), + sql.Identifier(self.data_model_definition.key_field.name), + sql.SQL(", ").join(sql.Literal(key) for key in key_batch), + ) + ) + + except DatabaseError as error: + # Rollback happens automatically if an exception occurs within the transaction block + raise MemoryConnectorException(f"Error deleting records: {error}") from error + + @override + def _serialize_dicts_to_store_models(self, records: Sequence[dict[str, Any]], **kwargs: Any) -> Sequence[Any]: + """Serialize a list of dicts of the data to the store model. + + Pass the records through without modification. + """ + return records + + @override + def _deserialize_store_models_to_dicts(self, records: Sequence[Any], **kwargs: Any) -> Sequence[dict[str, Any]]: + """Deserialize the store models to a list of dicts. + + Pass the records through without modification. + """ + return records + + @override + async def create_collection(self, **kwargs: Any) -> None: + """Create a PostgreSQL table based on a dictionary of VectorStoreRecordField. 
+ + Args: + table_name: Name of the table to be created + fields: A dictionary where keys are column names and values are VectorStoreRecordField instances + **kwargs: Additional arguments + """ + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + column_definitions = [] + table_name = self.collection_name + + for field_name, field in self.data_model_definition.fields.items(): + if not field.property_type: + raise ValueError(f"Property type is not defined for field '{field_name}'") + + # If the property type represents a Python type, convert it to a PostgreSQL type + property_type = python_type_to_postgres(field.property_type) or field.property_type.upper() + + # For Vector fields with dimensions, use pgvector's VECTOR type + # Note that other vector types are supported in pgvector (e.g. halfvec), + # but would need to be created outside of this method. + if isinstance(field, VectorStoreRecordVectorField) and field.dimensions: + column_definitions.append( + sql.SQL("{} VECTOR({})").format(sql.Identifier(field_name), sql.Literal(field.dimensions)) + ) + elif isinstance(field, VectorStoreRecordKeyField): + # Use the property_type directly for key fields + column_definitions.append( + sql.SQL("{} {} PRIMARY KEY").format(sql.Identifier(field_name), sql.SQL(property_type)) + ) + else: + # Use the property_type directly for other types + column_definitions.append(sql.SQL("{} {}").format(sql.Identifier(field_name), sql.SQL(property_type))) + + columns_str = sql.SQL(", ").join(column_definitions) + + create_table_query = sql.SQL("CREATE TABLE {}.{} ({})").format( + sql.Identifier(self.db_schema), sql.Identifier(table_name), columns_str + ) + + try: + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute(create_table_query) + await conn.commit() + + logger.info(f"Postgres table '{table_name}' created successfully.") + + # If the vector 
field defines an index, apply it + for vector_field in self.data_model_definition.vector_fields: + if vector_field.index_kind: + await self._create_index(table_name, vector_field) + + except DatabaseError as error: + raise MemoryConnectorException(f"Error creating table: {error}") from error + + async def _create_index(self, table_name: str, vector_field: VectorStoreRecordVectorField) -> None: + """Create an index on a column in the table. + + Args: + table_name: The name of the table. + vector_field: The vector field definition that the index is based on. + """ + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + column_name = vector_field.name + index_name = f"{table_name}_{column_name}_idx" + + # Only support creating HNSW indexes through the vector store + if vector_field.index_kind != IndexKind.HNSW: + raise MemoryConnectorException( + f"Unsupported index kind: {vector_field.index_kind}. " + "If you need to create an index of this type, please do so manually. " + "Only HNSW indexes are supported through the vector store." + ) + + # Require the distance function to be set for HNSW indexes + if not vector_field.distance_function: + raise MemoryConnectorException( + "Distance function must be set for HNSW indexes. " + "Please set the distance function in the vector field definition." 
+ ) + + ops_str = get_vector_index_ops_str(vector_field.distance_function) + + try: + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + sql.SQL("CREATE INDEX {} ON {}.{} USING {} ({} {})").format( + sql.Identifier(index_name), + sql.Identifier(self.db_schema), + sql.Identifier(table_name), + sql.SQL(vector_field.index_kind), + sql.Identifier(column_name), + sql.SQL(ops_str), + ) + ) + await conn.commit() + + logger.info(f"Index '{index_name}' created successfully on column '{column_name}'.") + + except DatabaseError as error: + raise MemoryConnectorException(f"Error creating index: {error}") from error + + @override + async def does_collection_exist(self, **kwargs: Any) -> bool: + """Check if the collection exists.""" + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + """ + SELECT table_name + FROM information_schema.tables + WHERE table_schema = %s AND table_name = %s + """, + (self.db_schema, self.collection_name), + ) + row = await cur.fetchone() + return bool(row) + + @override + async def delete_collection(self, **kwargs: Any) -> None: + """Delete the collection.""" + if self.connection_pool is None: + raise MemoryConnectorException("Connection pool is not available, use the collection as a context manager.") + + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + await cur.execute( + sql.SQL("DROP TABLE {scm}.{tbl} CASCADE").format( + scm=sql.Identifier(self.db_schema), tbl=sql.Identifier(self.collection_name) + ), + ) + await conn.commit() diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py index 7d73d6c9c62a..3d553c570846 100644 --- 
a/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_memory_store.py @@ -11,6 +11,7 @@ from psycopg_pool import ConnectionPool from pydantic import ValidationError +from semantic_kernel.connectors.memory.postgres.constants import DEFAULT_SCHEMA, MAX_DIMENSIONALITY from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings from semantic_kernel.exceptions import ( ServiceInitializationError, @@ -22,10 +23,6 @@ from semantic_kernel.memory.memory_store_base import MemoryStoreBase from semantic_kernel.utils.experimental_decorator import experimental_class -# Limitation based on pgvector documentation https://github.com/pgvector/pgvector#what-if-i-want-to-index-vectors-with-more-than-2000-dimensions -MAX_DIMENSIONALITY = 2000 -DEFAULT_SCHEMA = "public" - logger: logging.Logger = logging.getLogger(__name__) @@ -42,8 +39,8 @@ def __init__( self, connection_string: str, default_dimensionality: int, - min_pool: int, - max_pool: int, + min_pool: int | None = None, + max_pool: int | None = None, schema: str = DEFAULT_SCHEMA, env_file_path: str | None = None, env_file_encoding: str | None = None, @@ -51,14 +48,14 @@ def __init__( """Initializes a new instance of the PostgresMemoryStore class. Args: - connection_string (str): The connection string to the Postgres database. - default_dimensionality (int): The default dimensionality of the embeddings. - min_pool (int): The minimum number of connections in the connection pool. - max_pool (int): The maximum number of connections in the connection pool. - schema (str): The schema to use. (default: {"public"}) - env_file_path (str | None): Use the environment settings file as a fallback + connection_string: The connection string to the Postgres database. + default_dimensionality: The default dimensionality of the embeddings. + min_pool: The minimum number of connections in the connection pool. 
+ max_pool: The maximum number of connections in the connection pool. + schema: The schema to use. (default: {"public"}) + env_file_path: Use the environment settings file as a fallback to environment variables. (Optional) - env_file_encoding (str | None): The encoding of the environment settings file. + env_file_encoding: The encoding of the environment settings file. """ try: postgres_settings = PostgresSettings.create( @@ -69,11 +66,14 @@ def __init__( except ValidationError as ex: raise MemoryConnectorInitializationError("Failed to create Postgres settings.", ex) from ex + min_pool = min_pool or postgres_settings.min_pool + max_pool = max_pool or postgres_settings.max_pool + self._check_dimensionality(default_dimensionality) self._default_dimensionality = default_dimensionality self._connection_pool = ConnectionPool( - postgres_settings.connection_string.get_secret_value(), min_size=min_pool, max_size=max_pool + min_size=min_pool, max_size=max_pool, open=True, kwargs=postgres_settings.get_connection_args() ) self._schema = schema atexit.register(self._connection_pool.close) @@ -86,8 +86,8 @@ async def create_collection( r"""Creates a new collection. Args: - collection_name (str): The name of the collection to create.\n - dimension_num (Optional[int]): The dimensionality of the embeddings. (default: {None}) + collection_name: The name of the collection to create.\n + dimension_num: The dimensionality of the embeddings. (default: {None}) Uses the default dimensionality when not provided Returns: @@ -120,7 +120,7 @@ async def get_collections(self) -> list[str]: """Gets the list of collections. Returns: - List[str]: The list of collections. + The list of collections. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: return await self.__get_collections(cur) @@ -145,23 +145,23 @@ async def does_collection_exist(self, collection_name: str) -> bool: """Checks if a collection exists. 
Args: - collection_name (str): The name of the collection to check. + collection_name: The name of the collection to check. Returns: - bool: True if the collection exists; otherwise, False. + True if the collection exists; otherwise, False. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: return await self.__does_collection_exist(cur, collection_name) async def upsert(self, collection_name: str, record: MemoryRecord) -> str: - r"""Upserts a record. + """Upserts a record. Args: - collection_name (str): The name of the collection to upsert the record into.\n - record (MemoryRecord): The record to upsert. + collection_name: The name of the collection to upsert the record into. + record: The record to upsert. Returns: - str: The unique database key of the record. In Pinecone, this is the record ID. + The unique database key of the record. In Pinecone, this is the record ID. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: if not await self.__does_collection_exist(cur, collection_name): @@ -197,8 +197,8 @@ async def upsert_batch(self, collection_name: str, records: list[MemoryRecord]) """Upserts a batch of records. Args: - collection_name (str): The name of the collection to upsert the records into. - records (List[MemoryRecord]): The records to upsert. + collection_name: The name of the collection to upsert the records into. + records: The records to upsert. Returns: List[str]: The unique database keys of the records. @@ -246,12 +246,12 @@ async def get(self, collection_name: str, key: str, with_embedding: bool = False """Gets a record. Args: - collection_name (str): The name of the collection to get the record from. - key (str): The unique database key of the record. - with_embedding (bool): Whether to include the embedding in the result. (default: {False}) + collection_name: The name of the collection to get the record from. + key: The unique database key of the record. 
+ with_embedding: Whether to include the embedding in the result. (default: {False}) Returns: - MemoryRecord: The record. + The record. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: if not await self.__does_collection_exist(cur, collection_name): @@ -289,12 +289,12 @@ async def get_batch( """Gets a batch of records. Args: - collection_name (str): The name of the collection to get the records from. - keys (List[str]): The unique database keys of the records. - with_embeddings (bool): Whether to include the embeddings in the results. (default: {False}) + collection_name: The name of the collection to get the records from. + keys: The unique database keys of the records. + with_embeddings: Whether to include the embeddings in the results. (default: {False}) Returns: - List[MemoryRecord]: The records that were found from list of keys, can be empty. + The records that were found from list of keys, can be empty. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: if not await self.__does_collection_exist(cur, collection_name): @@ -317,9 +317,7 @@ async def get_batch( MemoryRecord.local_record( id=result[0], embedding=( - np.fromstring(result[1].strip("[]"), dtype=float, sep=",") - if with_embeddings - else np.array([]) + np.fromstring(result[1].strip("[]"), dtype=float, sep=",") if with_embeddings else np.array([]) ), text=result[2]["text"], description=result[2]["description"], @@ -333,8 +331,8 @@ async def remove(self, collection_name: str, key: str) -> None: """Removes a record. Args: - collection_name (str): The name of the collection to remove the record from. - key (str): The unique database key of the record to remove. + collection_name: The name of the collection to remove the record from. + key: The unique database key of the record to remove. Returns: None @@ -356,8 +354,8 @@ async def remove_batch(self, collection_name: str, keys: list[str]) -> None: """Removes a batch of records. 
Args: - collection_name (str): The name of the collection to remove the records from. - keys (List[str]): The unique database keys of the records to remove. + collection_name: The name of the collection to remove the records from. + keys: The unique database keys of the records to remove. Returns: None @@ -386,14 +384,14 @@ async def get_nearest_matches( """Gets the nearest matches to an embedding using cosine similarity. Args: - collection_name (str): The name of the collection to get the nearest matches from. - embedding (ndarray): The embedding to find the nearest matches to. - limit (int): The maximum number of matches to return. - min_relevance_score (float): The minimum relevance score of the matches. (default: {0.0}) - with_embeddings (bool): Whether to include the embeddings in the results. (default: {False}) + collection_name: The name of the collection to get the nearest matches from. + embedding: The embedding to find the nearest matches to. + limit: The maximum number of matches to return. + min_relevance_score: The minimum relevance score of the matches. (default: {0.0}) + with_embeddings: Whether to include the embeddings in the results. (default: {False}) Returns: - List[Tuple[MemoryRecord, float]]: The records and their relevance scores. + The records and their relevance scores. """ with self._connection_pool.connection() as conn, conn.cursor() as cur: if not await self.__does_collection_exist(cur, collection_name): @@ -454,10 +452,10 @@ async def get_nearest_match( """Gets the nearest match to an embedding using cosine similarity. Args: - collection_name (str): The name of the collection to get the nearest match from. - embedding (ndarray): The embedding to find the nearest match to. - min_relevance_score (float): The minimum relevance score of the match. (default: {0.0}) - with_embedding (bool): Whether to include the embedding in the result. (default: {False}) + collection_name: The name of the collection to get the nearest match from. 
+ embedding: The embedding to find the nearest match to. + min_relevance_score: The minimum relevance score of the match. (default: {0.0}) + with_embedding: Whether to include the embedding in the result. (default: {False}) Returns: Tuple[MemoryRecord, float]: The record and the relevance score. @@ -497,10 +495,18 @@ def _check_dimensionality(self, dimension_num): raise ServiceInitializationError("Dimensionality must be a positive integer. ") def __serialize_metadata(self, record: MemoryRecord) -> str: - return json.dumps( - { - "text": record._text, - "description": record._description, - "additional_metadata": record._additional_metadata, - } - ) + return json.dumps({ + "text": record._text, + "description": record._description, + "additional_metadata": record._additional_metadata, + }) + + # Enable the connection pool to be closed when using as a context manager + def __enter__(self) -> "PostgresMemoryStore": + """Enter the runtime context.""" + return self + + def __exit__(self, exc_type, exc_value, traceback) -> bool: + """Exit the runtime context and dispose of the connection pool.""" + self._connection_pool.close() + return False diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py index feb3901210b7..32cd56f7b079 100644 --- a/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_settings.py @@ -1,9 +1,20 @@ # Copyright (c) Microsoft. All rights reserved. 
-from typing import ClassVar +from typing import Any, ClassVar -from pydantic import SecretStr +from psycopg.conninfo import conninfo_to_dict +from psycopg_pool import AsyncConnectionPool +from pydantic import Field, SecretStr +from semantic_kernel.connectors.memory.postgres.constants import ( + PGDATABASE_ENV_VAR, + PGHOST_ENV_VAR, + PGPASSWORD_ENV_VAR, + PGPORT_ENV_VAR, + PGSSL_MODE_ENV_VAR, + PGUSER_ENV_VAR, +) +from semantic_kernel.exceptions.memory_connector_exceptions import MemoryConnectorInitializationError from semantic_kernel.kernel_pydantic import KernelBaseSettings from semantic_kernel.utils.experimental_decorator import experimental_class @@ -12,11 +23,92 @@ class PostgresSettings(KernelBaseSettings): """Postgres model settings. + This class is used to configure the Postgres connection pool + and other settings related to the Postgres store. + + The settings that match what can be configured on tools such as + psql, pg_dump, pg_restore, pgbench, createdb, and + `libpq `_ + match the environment variables used by those tools. This includes + PGHOST, PGPORT, PGDATABASE, PGUSER, PGPASSWORD, and PGSSL_MODE. + Other settings follow the standard pattern of Pydantic settings, + e.g. POSTGRES_CONNECTION_STRING. + Args: - - connection_string: str - Postgres connection string + connection_string: Postgres connection string (Env var POSTGRES_CONNECTION_STRING) + host: Postgres host (Env var PGHOST) + port: Postgres port (Env var PGPORT) + dbname: Postgres database name (Env var PGDATABASE) + user: Postgres user (Env var PGUSER) + password: Postgres password (Env var PGPASSWORD) + sslmode: Postgres sslmode (Env var PGSSL_MODE) + Use "require" to require SSL, "disable" to disable SSL, or "prefer" to prefer + SSL but allow a connection without it. Defaults to "prefer". + min_pool: Minimum connection pool size. Defaults to 1. + (Env var POSTGRES_MIN_POOL) + max_pool: Maximum connection pool size. Defaults to 5. 
+ (Env var POSTGRES_MAX_POOL) + default_dimensionality: Default dimensionality for vectors. Defaults to 100. + (Env var POSTGRES_DEFAULT_DIMENSIONALITY) + max_rows_per_transaction: Maximum number of rows to process in a single transaction. Defaults to 1000. + (Env var POSTGRES_MAX_ROWS_PER_TRANSACTION) """ env_prefix: ClassVar[str] = "POSTGRES_" - connection_string: SecretStr + connection_string: SecretStr | None = None + host: str | None = Field(None, alias=PGHOST_ENV_VAR) + port: int | None = Field(5432, alias=PGPORT_ENV_VAR) + dbname: str | None = Field(None, alias=PGDATABASE_ENV_VAR) + user: str | None = Field(None, alias=PGUSER_ENV_VAR) + password: SecretStr | None = Field(None, alias=PGPASSWORD_ENV_VAR) + sslmode: str | None = Field(None, alias=PGSSL_MODE_ENV_VAR) + + min_pool: int = 1 + max_pool: int = 5 + + default_dimensionality: int = 100 + max_rows_per_transaction: int = 1000 + + def get_connection_args(self) -> dict[str, Any]: + """Get connection arguments.""" + result = conninfo_to_dict(self.connection_string.get_secret_value()) if self.connection_string else {} + + if self.host: + result["host"] = self.host + if self.port: + result["port"] = self.port + if self.dbname: + result["dbname"] = self.dbname + if self.user: + result["user"] = self.user + if self.password: + result["password"] = self.password.get_secret_value() + + # Ensure required values + if "host" not in result: + raise MemoryConnectorInitializationError("host is required. Please set PGHOST or connection_string.") + if "dbname" not in result: + raise MemoryConnectorInitializationError( + "database is required. Please set PGDATABASE or connection_string." + ) + if "user" not in result: + raise MemoryConnectorInitializationError("user is required. Please set PGUSER or connection_string.") + if "password" not in result: + raise MemoryConnectorInitializationError( + "password is required. Please set PGPASSWORD or connection_string." 
+ ) + + return result + + async def create_connection_pool(self) -> AsyncConnectionPool: + """Creates a connection pool based off of settings.""" + pool = AsyncConnectionPool( + min_size=self.min_pool, + max_size=self.max_pool, + open=False, + kwargs=self.get_connection_args(), + ) + await pool.open() + return pool diff --git a/python/semantic_kernel/connectors/memory/postgres/postgres_store.py b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py new file mode 100644 index 000000000000..1e857bc035df --- /dev/null +++ b/python/semantic_kernel/connectors/memory/postgres/postgres_store.py @@ -0,0 +1,75 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +import sys +from typing import Any, TypeVar + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +from psycopg import sql +from psycopg_pool import AsyncConnectionPool + +from semantic_kernel.connectors.memory.postgres.postgres_collection import PostgresCollection +from semantic_kernel.connectors.memory.postgres.postgres_memory_store import DEFAULT_SCHEMA +from semantic_kernel.data.vector_store import VectorStore +from semantic_kernel.data.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.vector_store_record_collection import VectorStoreRecordCollection +from semantic_kernel.utils.experimental_decorator import experimental_class + +logger: logging.Logger = logging.getLogger(__name__) + +TModel = TypeVar("TModel") + + +@experimental_class +class PostgresStore(VectorStore): + """PostgreSQL store implementation.""" + + connection_pool: AsyncConnectionPool + db_schema: str = DEFAULT_SCHEMA + tables: list[str] | None = None + """Tables to consider as collections. 
Default is all tables in the schema.""" + + @override + async def list_collection_names(self, **kwargs: Any) -> list[str]: + async with self.connection_pool.connection() as conn, conn.cursor() as cur: + base_query = sql.SQL(""" + SELECT table_name + FROM information_schema.tables + WHERE table_schema = {} + """).format(sql.Placeholder()) + + params = [self.db_schema] + + if self.tables: + table_placeholders = sql.SQL(", ").join(sql.Placeholder() * len(self.tables)) + base_query += sql.SQL(" AND table_name IN ({})").format(table_placeholders) + params.extend(self.tables) + + await cur.execute(base_query, params) + rows = await cur.fetchall() + return [row[0] for row in rows] + + @override + def get_collection( + self, + collection_name: str, + data_model_type: type[TModel], + data_model_definition: VectorStoreRecordDefinition | None = None, + **kwargs: Any, + ) -> VectorStoreRecordCollection: + if collection_name not in self.vector_record_collections: + self.vector_record_collections[collection_name] = PostgresCollection( + connection_pool=self.connection_pool, + db_schema=self.db_schema, + collection_name=collection_name, + data_model_type=data_model_type, + # data model definition will be validated in the collection + data_model_definition=data_model_definition, # type: ignore + **kwargs, + ) + + return self.vector_record_collections[collection_name] diff --git a/python/semantic_kernel/connectors/memory/postgres/utils.py b/python/semantic_kernel/connectors/memory/postgres/utils.py new file mode 100644 index 000000000000..9435a4a51560 --- /dev/null +++ b/python/semantic_kernel/connectors/memory/postgres/utils.py @@ -0,0 +1,134 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import json +import re +from typing import Any + +from psycopg_pool import AsyncConnectionPool + +from semantic_kernel.data.const import DistanceFunction +from semantic_kernel.data.vector_store_record_fields import VectorStoreRecordField, VectorStoreRecordVectorField + + +def python_type_to_postgres(python_type_str: str) -> str | None: + """Convert a string representation of a Python type to a PostgreSQL data type. + + Args: + python_type_str: The string representation of the Python type (e.g., "int", "List[str]"). + + Returns: + Corresponding PostgreSQL data type as a string, if found. If the type is not found, return None. + """ + # Basic type mapping from Python types (in string form) to PostgreSQL types + type_mapping = { + "str": "TEXT", + "int": "INTEGER", + "float": "DOUBLE PRECISION", + "bool": "BOOLEAN", + "dict": "JSONB", + "datetime": "TIMESTAMP", + "bytes": "BYTEA", + "NoneType": "NULL", + } + + # Regular expression to detect lists, e.g., "List[str]" or "List[int]" + list_pattern = re.compile(r"(?i)List\[(.*)\]") + + # Check if the type is a list + match = list_pattern.match(python_type_str) + if match: + # Extract the inner type of the list and convert it to a PostgreSQL array type + element_type_str = match.group(1) + postgres_element_type = python_type_to_postgres(element_type_str) + return f"{postgres_element_type}[]" + + # Handle basic types + if python_type_str in type_mapping: + return type_mapping[python_type_str] + + return None + + +def convert_row_to_dict(row: tuple[Any, ...], fields: list[tuple[str, VectorStoreRecordField]]) -> dict[str, Any]: + """Convert a row from a PostgreSQL query to a dictionary. + + Uses the field information to map the row values to the corresponding field names. + + Args: + row: A row from a PostgreSQL query, represented as a tuple. + fields: A list of tuples, where each tuple contains the field name and field definition. + + Returns: + A dictionary representation of the row. 
+ """ + + def _convert(v: Any | None, field: VectorStoreRecordField) -> Any | None: + if v is None: + return None + if isinstance(field, VectorStoreRecordVectorField): + # psycopg returns vector as a string + return json.loads(v) + return v + + return {field_name: _convert(value, field) for (field_name, field), value in zip(fields, row)} + + +def convert_dict_to_row(record: dict[str, Any], fields: list[tuple[str, VectorStoreRecordField]]) -> tuple[Any, ...]: + """Convert a dictionary to a row for a PostgreSQL query. + + Args: + record: A dictionary representing a record. + fields: A list of tuples, where each tuple contains the field name and field definition. + + Returns: + A tuple representing the record. + """ + + def _convert(v: Any | None) -> Any | None: + if isinstance(v, dict): + # psycopg requires serializing dicts as strings. + return json.dumps(v) + return v + + return tuple(_convert(record.get(field.name)) for _, field in fields) + + +def get_vector_index_ops_str(distance_function: DistanceFunction) -> str: + """Get the PostgreSQL ops string for creating an index for a given distance function. + + Args: + distance_function: The distance function the index is created for. + + Returns: + The PostgreSQL ops string for the given distance function. + + Examples: + >>> get_vector_index_ops_str(DistanceFunction.COSINE) + 'vector_cosine_ops' + """ + if distance_function == DistanceFunction.COSINE: + return "vector_cosine_ops" + if distance_function == DistanceFunction.DOT_PROD: + return "vector_ip_ops" + if distance_function == DistanceFunction.EUCLIDEAN: + return "vector_l2_ops" + if distance_function == DistanceFunction.MANHATTAN: + return "vector_l1_ops" + + raise ValueError(f"Unsupported distance function: {distance_function}") + + +async def ensure_open(connection_pool: AsyncConnectionPool) -> AsyncConnectionPool: + """Ensure the connection pool is open. + + It is safe to call open on an already open connection pool. 
+ Use this wrapper to ensure the connection pool is open before using it. + + Args: + connection_pool: The connection pool to ensure is open. + + Returns: + The connection pool, after ensuring it is open + """ + await connection_pool.open() + return connection_pool diff --git a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation_parameter.py b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation_parameter.py index 761e390c9d4c..0f8745e08f2e 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation_parameter.py +++ b/python/semantic_kernel/connectors/openapi_plugin/models/rest_api_operation_parameter.py @@ -28,7 +28,7 @@ def __init__( description: str | None = None, is_required: bool = False, default_value: Any | None = None, - schema: str | None = None, + schema: str | dict | None = None, response: RestApiOperationExpectedResponse | None = None, ): """Initialize the RestApiOperationParameter.""" diff --git a/python/semantic_kernel/connectors/openapi_plugin/openapi_parser.py b/python/semantic_kernel/connectors/openapi_plugin/openapi_parser.py index 85f13a096908..984f120e837b 100644 --- a/python/semantic_kernel/connectors/openapi_plugin/openapi_parser.py +++ b/python/semantic_kernel/connectors/openapi_plugin/openapi_parser.py @@ -63,26 +63,27 @@ def _parse_parameters(self, parameters: list[dict[str, Any]]): """Parse the parameters from the OpenAPI document.""" result: list[RestApiOperationParameter] = [] for param in parameters: - name = param["name"] - type = param["schema"]["type"] + name: str = param["name"] if not param.get("in"): raise PluginInitializationError(f"Parameter {name} is missing 'in' field") + if param.get("content", None) is not None: + # The schema and content fields are mutually exclusive. + raise PluginInitializationError(f"Parameter {name} cannot have a 'content' field. 
Expected: schema.") location = RestApiOperationParameterLocation(param["in"]) - description = param.get("description", None) - is_required = param.get("required", False) + description: str = param.get("description", None) + is_required: bool = param.get("required", False) default_value = param.get("default", None) - schema = param.get("schema", None) - schema_type = schema.get("type", None) if schema else "string" + schema: dict[str, Any] | None = param.get("schema", None) result.append( RestApiOperationParameter( name=name, - type=type, + type=schema.get("type", "string") if schema else "string", location=location, description=description, is_required=is_required, default_value=default_value, - schema=schema_type, + schema=schema if schema else {"type": "string"}, ) ) return result diff --git a/python/semantic_kernel/connectors/utils/structured_output_schema.py b/python/semantic_kernel/connectors/utils/structured_output_schema.py new file mode 100644 index 000000000000..d274ed8e7fc5 --- /dev/null +++ b/python/semantic_kernel/connectors/utils/structured_output_schema.py @@ -0,0 +1,11 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import Any + + +def generate_structured_output_response_format_schema(name: str, schema: dict[str, Any]) -> dict[str, Any]: + """Generate the structured output response format schema.""" + return { + "type": "json_schema", + "json_schema": {"name": name, "strict": True, "schema": schema}, + } diff --git a/python/semantic_kernel/const.py b/python/semantic_kernel/const.py index 1891281fcb91..8d43e136c755 100644 --- a/python/semantic_kernel/const.py +++ b/python/semantic_kernel/const.py @@ -6,3 +6,5 @@ DEFAULT_SERVICE_NAME: Final[str] = "default" USER_AGENT: Final[str] = "User-Agent" PARSED_ANNOTATION_UNION_DELIMITER: Final[str] = "," + +AUTO_FUNCTION_INVOCATION_SPAN_NAME: Final[str] = "AutoFunctionInvocationLoop" diff --git a/python/semantic_kernel/contents/__init__.py b/python/semantic_kernel/contents/__init__.py index 7563bdbcfe93..2e393ca7bf7e 100644 --- a/python/semantic_kernel/contents/__init__.py +++ b/python/semantic_kernel/contents/__init__.py @@ -6,7 +6,9 @@ from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole @@ -21,7 +23,9 @@ "FunctionCallContent", "FunctionResultContent", "ImageContent", + "StreamingAnnotationContent", "StreamingChatMessageContent", + "StreamingFileReferenceContent", "StreamingTextContent", "TextContent", ] diff --git 
a/python/semantic_kernel/contents/chat_message_content.py b/python/semantic_kernel/contents/chat_message_content.py index ced273de75a2..da9005626de9 100644 --- a/python/semantic_kernel/contents/chat_message_content.py +++ b/python/semantic_kernel/contents/chat_message_content.py @@ -3,7 +3,7 @@ import logging from enum import Enum from html import unescape -from typing import Any, ClassVar, Literal, Union, overload +from typing import Annotated, Any, ClassVar, Literal, overload from xml.etree.ElementTree import Element # nosec from defusedxml import ElementTree @@ -18,6 +18,8 @@ FUNCTION_CALL_CONTENT_TAG, FUNCTION_RESULT_CONTENT_TAG, IMAGE_CONTENT_TAG, + STREAMING_ANNOTATION_CONTENT_TAG, + STREAMING_FILE_REFERENCE_CONTENT_TAG, TEXT_CONTENT_TAG, ContentTypes, ) @@ -26,7 +28,8 @@ from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent from semantic_kernel.contents.kernel_content import KernelContent -from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason @@ -39,17 +42,20 @@ FUNCTION_CALL_CONTENT_TAG: FunctionCallContent, FUNCTION_RESULT_CONTENT_TAG: FunctionResultContent, IMAGE_CONTENT_TAG: ImageContent, + STREAMING_FILE_REFERENCE_CONTENT_TAG: StreamingFileReferenceContent, + STREAMING_ANNOTATION_CONTENT_TAG: StreamingAnnotationContent, } -ITEM_TYPES = Union[ - AnnotationContent, - ImageContent, - TextContent, - StreamingTextContent, - FunctionResultContent, - FunctionCallContent, - FileReferenceContent, -] +ITEM_TYPES = ( + AnnotationContent + | ImageContent + | 
TextContent + | FunctionResultContent + | FunctionCallContent + | FileReferenceContent + | StreamingAnnotationContent + | StreamingFileReferenceContent +) logger = logging.getLogger(__name__) @@ -78,7 +84,7 @@ class ChatMessageContent(KernelContent): tag: ClassVar[str] = CHAT_MESSAGE_CONTENT_TAG role: AuthorRole name: str | None = None - items: list[ITEM_TYPES] = Field(default_factory=list, discriminator=DISCRIMINATOR_FIELD) + items: list[Annotated[ITEM_TYPES, Field(..., discriminator=DISCRIMINATOR_FIELD)]] = Field(default_factory=list) encoding: str | None = None finish_reason: FinishReason | None = None diff --git a/python/semantic_kernel/contents/const.py b/python/semantic_kernel/contents/const.py index 0e2a34e876b3..e0d1cbf76a6e 100644 --- a/python/semantic_kernel/contents/const.py +++ b/python/semantic_kernel/contents/const.py @@ -7,8 +7,10 @@ TEXT_CONTENT_TAG: Final[str] = "text" IMAGE_CONTENT_TAG: Final[str] = "image" ANNOTATION_CONTENT_TAG: Final[str] = "annotation" +STREAMING_ANNOTATION_CONTENT_TAG: Final[str] = "streaming_annotation" BINARY_CONTENT_TAG: Final[str] = "binary" FILE_REFERENCE_CONTENT_TAG: Final[str] = "file_reference" +STREAMING_FILE_REFERENCE_CONTENT_TAG: Final[str] = "streaming_file_reference" FUNCTION_CALL_CONTENT_TAG: Final[str] = "function_call" FUNCTION_RESULT_CONTENT_TAG: Final[str] = "function_result" DISCRIMINATOR_FIELD: Final[str] = "content_type" @@ -24,4 +26,6 @@ class ContentTypes(str, Enum): FILE_REFERENCE_CONTENT = FILE_REFERENCE_CONTENT_TAG FUNCTION_CALL_CONTENT = FUNCTION_CALL_CONTENT_TAG FUNCTION_RESULT_CONTENT = FUNCTION_RESULT_CONTENT_TAG + STREAMING_ANNOTATION_CONTENT = STREAMING_ANNOTATION_CONTENT_TAG + STREAMING_FILE_REFERENCE_CONTENT = STREAMING_FILE_REFERENCE_CONTENT_TAG TEXT_CONTENT = TEXT_CONTENT_TAG diff --git a/python/semantic_kernel/contents/streaming_annotation_content.py b/python/semantic_kernel/contents/streaming_annotation_content.py new file mode 100644 index 000000000000..ff44aba5aa0e --- /dev/null +++ 
b/python/semantic_kernel/contents/streaming_annotation_content.py @@ -0,0 +1,63 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from typing import Any, ClassVar, Literal, TypeVar +from xml.etree.ElementTree import Element # nosec + +from pydantic import Field + +from semantic_kernel.contents.const import STREAMING_ANNOTATION_CONTENT_TAG, ContentTypes +from semantic_kernel.contents.kernel_content import KernelContent +from semantic_kernel.utils.experimental_decorator import experimental_class + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="StreamingAnnotationContent") + + +@experimental_class +class StreamingAnnotationContent(KernelContent): + """Streaming Annotation content.""" + + content_type: Literal[ContentTypes.STREAMING_ANNOTATION_CONTENT] = Field( + STREAMING_ANNOTATION_CONTENT_TAG, init=False + ) # type: ignore + tag: ClassVar[str] = STREAMING_ANNOTATION_CONTENT_TAG + file_id: str | None = None + quote: str | None = None + start_index: int | None = None + end_index: int | None = None + + def __str__(self) -> str: + """Return the string representation of the annotation content.""" + return f"StreamingAnnotationContent(file_id={self.file_id}, quote={self.quote}, start_index={self.start_index}, end_index={self.end_index})" # noqa: E501 + + def to_element(self) -> Element: + """Convert the annotation content to an Element.""" + element = Element(self.tag) + if self.file_id: + element.set("file_id", self.file_id) + if self.quote: + element.set("quote", self.quote) + if self.start_index is not None: + element.set("start_index", str(self.start_index)) + if self.end_index is not None: + element.set("end_index", str(self.end_index)) + return element + + @classmethod + def from_element(cls: type[_T], element: Element) -> _T: + """Create an instance from an Element.""" + return cls( + file_id=element.get("file_id"), + quote=element.get("quote"), + start_index=int(element.get("start_index")) if element.get("start_index") else 
None, # type: ignore + end_index=int(element.get("end_index")) if element.get("end_index") else None, # type: ignore + ) + + def to_dict(self) -> dict[str, Any]: + """Convert the instance to a dictionary.""" + return { + "type": "text", + "text": f"{self.file_id} {self.quote} (Start Index={self.start_index}->End Index={self.end_index})", + } diff --git a/python/semantic_kernel/contents/streaming_chat_message_content.py b/python/semantic_kernel/contents/streaming_chat_message_content.py index b2aa2e0ea87b..51110b43ea5c 100644 --- a/python/semantic_kernel/contents/streaming_chat_message_content.py +++ b/python/semantic_kernel/contents/streaming_chat_message_content.py @@ -8,7 +8,9 @@ from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.function_result_content import FunctionResultContent from semantic_kernel.contents.image_content import ImageContent +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent from semantic_kernel.contents.streaming_content_mixin import StreamingContentMixin +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent from semantic_kernel.contents.streaming_text_content import StreamingTextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.contents.utils.finish_reason import FinishReason @@ -19,6 +21,8 @@ StreamingTextContent, FunctionCallContent, FunctionResultContent, + StreamingFileReferenceContent, + StreamingAnnotationContent, ] @@ -146,8 +150,13 @@ def __bytes__(self) -> bytes: def __add__(self, other: "StreamingChatMessageContent") -> "StreamingChatMessageContent": """When combining two StreamingChatMessageContent instances, the content fields are combined. - The inner_content of the first one is used, ai_model_id and encoding should be the same, - if role is set, they should be the same. + The addition should follow these rules: + 1. 
The inner_content of the two will be combined. If they are not lists, they will be converted to lists. + 2. ai_model_id should be the same. + 3. encoding should be the same. + 4. role should be the same. + 5. choice_index should be the same. + 6. Metadata will be combined """ if not isinstance(other, StreamingChatMessageContent): raise ContentAdditionException( @@ -161,36 +170,14 @@ def __add__(self, other: "StreamingChatMessageContent") -> "StreamingChatMessage raise ContentAdditionException("Cannot add StreamingChatMessageContent with different encoding") if self.role and other.role and self.role != other.role: raise ContentAdditionException("Cannot add StreamingChatMessageContent with different role") - if self.items or other.items: - for other_item in other.items: - added = False - for id, item in enumerate(list(self.items)): - if type(item) is type(other_item) and hasattr(item, "__add__"): - try: - new_item = item + other_item # type: ignore - self.items[id] = new_item - added = True - except (ValueError, ContentAdditionException): - continue - if not added: - self.items.append(other_item) - if not isinstance(self.inner_content, list): - self.inner_content = [self.inner_content] if self.inner_content else [] - other_content = ( - other.inner_content - if isinstance(other.inner_content, list) - else [other.inner_content] - if other.inner_content - else [] - ) - self.inner_content.extend(other_content) + return StreamingChatMessageContent( role=self.role, - items=self.items, # type: ignore + items=self._merge_items_lists(other.items), choice_index=self.choice_index, - inner_content=self.inner_content, + inner_content=self._merge_inner_contents(other.inner_content), ai_model_id=self.ai_model_id, - metadata=self.metadata, + metadata=self.metadata | other.metadata, encoding=self.encoding, finish_reason=self.finish_reason or other.finish_reason, ) @@ -217,3 +204,15 @@ def to_element(self) -> "Element": for index, item in enumerate(self.items): root.insert(index, 
item.to_element()) return root + + def __hash__(self) -> int: + """Return the hash of the streaming chat message content.""" + return hash(( + self.tag, + self.role, + self.content, + self.encoding, + self.finish_reason, + self.choice_index, + *self.items, + )) diff --git a/python/semantic_kernel/contents/streaming_content_mixin.py b/python/semantic_kernel/contents/streaming_content_mixin.py index 4441e92f9fe7..c1640bb9a3a3 100644 --- a/python/semantic_kernel/contents/streaming_content_mixin.py +++ b/python/semantic_kernel/contents/streaming_content_mixin.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +import logging import sys from abc import ABC, abstractmethod from typing import Any @@ -9,8 +10,11 @@ else: from typing_extensions import Self # pragma: no cover +from semantic_kernel.exceptions.content_exceptions import ContentAdditionException from semantic_kernel.kernel_pydantic import KernelBaseModel +logger: logging.Logger = logging.getLogger(__name__) + class StreamingContentMixin(KernelBaseModel, ABC): """Mixin class for all streaming kernel contents.""" @@ -26,3 +30,55 @@ def __bytes__(self) -> bytes: def __add__(self, other: Any) -> Self: """Combine two streaming contents together.""" pass + + def _merge_items_lists(self, other_items: list[Any]) -> list[Any]: + """Create a new list with the items of the current instance and the given list.""" + if not hasattr(self, "items"): + raise ContentAdditionException(f"Cannot merge items for this instance of type: {type(self)}") + + # Create a copy of the items list to avoid modifying the original instance. + # Note that the items are not copied, only the list is. 
+ new_items_list = self.items.copy() + + if new_items_list or other_items: + for other_item in other_items: + added = False + for id, item in enumerate(new_items_list): + if type(item) is type(other_item) and hasattr(item, "__add__"): + try: + new_item = item + other_item # type: ignore + new_items_list[id] = new_item + added = True + except (ValueError, ContentAdditionException) as ex: + logger.debug(f"Could not add item {other_item} to {item}.", exc_info=ex) + continue + if not added: + logger.debug(f"Could not add item {other_item} to any item in the list. Adding it as a new item.") + new_items_list.append(other_item) + + return new_items_list + + def _merge_inner_contents(self, other_inner_content: Any | list[Any]) -> list[Any]: + """Create a new list with the inner content of the current instance and the given one.""" + if not hasattr(self, "inner_content"): + raise ContentAdditionException(f"Cannot merge inner content for this instance of type: {type(self)}") + + # Create a copy of the inner content list to avoid modifying the original instance. + # Note that the inner content is not copied, only the list is. + # If the inner content is not a list, it is converted to a list. + if isinstance(self.inner_content, list): + new_inner_contents_list = self.inner_content.copy() + else: + new_inner_contents_list = [self.inner_content] + + other_inner_content = ( + other_inner_content + if isinstance(other_inner_content, list) + else [other_inner_content] + if other_inner_content + else [] + ) + + new_inner_contents_list.extend(other_inner_content) + + return new_inner_contents_list diff --git a/python/semantic_kernel/contents/streaming_file_reference_content.py b/python/semantic_kernel/contents/streaming_file_reference_content.py new file mode 100644 index 000000000000..fe55b9ca9cdd --- /dev/null +++ b/python/semantic_kernel/contents/streaming_file_reference_content.py @@ -0,0 +1,50 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging +from typing import Any, ClassVar, Literal, TypeVar +from xml.etree.ElementTree import Element # nosec + +from pydantic import Field + +from semantic_kernel.contents.const import STREAMING_FILE_REFERENCE_CONTENT_TAG, ContentTypes +from semantic_kernel.contents.kernel_content import KernelContent +from semantic_kernel.utils.experimental_decorator import experimental_class + +logger = logging.getLogger(__name__) + +_T = TypeVar("_T", bound="StreamingFileReferenceContent") + + +@experimental_class +class StreamingFileReferenceContent(KernelContent): + """Streaming File reference content.""" + + content_type: Literal[ContentTypes.STREAMING_FILE_REFERENCE_CONTENT] = Field( + STREAMING_FILE_REFERENCE_CONTENT_TAG, init=False + ) # type: ignore + tag: ClassVar[str] = STREAMING_FILE_REFERENCE_CONTENT_TAG + file_id: str | None = None + + def __str__(self) -> str: + """Return the string representation of the file reference content.""" + return f"StreamingFileReferenceContent(file_id={self.file_id})" + + def to_element(self) -> Element: + """Convert the file reference content to an Element.""" + element = Element(self.tag) + if self.file_id: + element.set("file_id", self.file_id) + return element + + @classmethod + def from_element(cls: type[_T], element: Element) -> _T: + """Create an instance from an Element.""" + return cls( + file_id=element.get("file_id"), + ) + + def to_dict(self) -> dict[str, Any]: + """Convert the instance to a dictionary.""" + return { + "file_id": self.file_id, + } diff --git a/python/semantic_kernel/contents/streaming_text_content.py b/python/semantic_kernel/contents/streaming_text_content.py index 80c25f89d809..66440be6ae6b 100644 --- a/python/semantic_kernel/contents/streaming_text_content.py +++ b/python/semantic_kernel/contents/streaming_text_content.py @@ -31,7 +31,12 @@ def __bytes__(self) -> bytes: def __add__(self, other: TextContent) -> "StreamingTextContent": """When combining two StreamingTextContent instances, the text 
fields are combined. - The inner_content of the first one is used, choice_index, ai_model_id and encoding should be the same. + The addition should follow these rules: + 1. The inner_content of the two will be combined. If they are not lists, they will be converted to lists. + 2. ai_model_id should be the same. + 3. encoding should be the same. + 4. choice_index should be the same. + 5. Metadata will be combined. """ if isinstance(other, StreamingTextContent) and self.choice_index != other.choice_index: raise ContentAdditionException("Cannot add StreamingTextContent with different choice_index") @@ -39,9 +44,10 @@ def __add__(self, other: TextContent) -> "StreamingTextContent": raise ContentAdditionException("Cannot add StreamingTextContent from different ai_model_id") if self.encoding != other.encoding: raise ContentAdditionException("Cannot add StreamingTextContent with different encoding") + return StreamingTextContent( choice_index=self.choice_index, - inner_content=self.inner_content, + inner_content=self._merge_inner_contents(other.inner_content), ai_model_id=self.ai_model_id, metadata=self.metadata, text=(self.text or "") + (other.text or ""), diff --git a/python/semantic_kernel/contents/utils/data_uri.py b/python/semantic_kernel/contents/utils/data_uri.py index 3cf080af8577..a4407ff2237b 100644 --- a/python/semantic_kernel/contents/utils/data_uri.py +++ b/python/semantic_kernel/contents/utils/data_uri.py @@ -10,7 +10,7 @@ if sys.version < "3.11": from typing_extensions import Self # pragma: no cover else: - from typing import Self # pragma: no cover + from typing import Self # type: ignore # pragma: no cover from pydantic import Field, ValidationError, field_validator, model_validator from pydantic_core import Url diff --git a/python/semantic_kernel/core_plugins/sessions_python_tool/README.md b/python/semantic_kernel/core_plugins/sessions_python_tool/README.md index 472789bb5e33..5312bc8c617f 100644 --- 
a/python/semantic_kernel/core_plugins/sessions_python_tool/README.md +++ b/python/semantic_kernel/core_plugins/sessions_python_tool/README.md @@ -6,7 +6,7 @@ Please follow the [Azure Container Apps Documentation](https://learn.microsoft.c ## Configuring the Python Plugin -To successfully use the Python Plugin in Semantic Kernel, you must install the Poetry `azure` extras by running `poetry install -E azure`. +To successfully use the Python Plugin in Semantic Kernel, you must install the `azure` extras by running `uv sync --extra azure` or `pip install semantic-kernel[azure]`. Next, as an environment variable or in the .env file, add the `poolManagementEndpoint` value from above to the variable `ACA_POOL_MANAGEMENT_ENDPOINT`. The `poolManagementEndpoint` should look something like: @@ -14,7 +14,10 @@ Next, as an environment variable or in the .env file, add the `poolManagementEnd https://eastus.acasessions.io/subscriptions/{{subscriptionId}}/resourceGroups/{{resourceGroup}}/sessionPools/{{sessionPool}}/python/execute ``` -It is possible to add the code interpreter plugin as follows: +You can also provide the `ACA_TOKEN_ENDPOINT` if you want to override the default value of `https://acasessions.io/.default`. If this token endpoint doesn't need to be overridden, then it is +not necessary to include this as an environment variable, in the .env file, or via the plugin's constructor. Please follow the [Azure Container Apps Documentation](https://learn.microsoft.com/en-us/azure/container-apps/sessions-code-interpreter) to review the proper role required to authenticate with the `DefaultAzureCredential`. + +Next, let's move on to implementing the plugin in code. 
It is possible to add the code interpreter plugin as follows: ```python kernel = Kernel() diff --git a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_plugin.py b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_plugin.py index 70849ce6827d..c2fba26a5ff9 100644 --- a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_plugin.py +++ b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_plugin.py @@ -1,5 +1,6 @@ # Copyright (c) Microsoft. All rights reserved. +import inspect import logging import os import re @@ -34,22 +35,25 @@ class SessionsPythonTool(KernelBaseModel): pool_management_endpoint: HttpsUrl settings: SessionsPythonSettings - auth_callback: Callable[..., Awaitable[Any]] + auth_callback: Callable[..., Any | Awaitable[Any]] http_client: AsyncClient def __init__( self, - auth_callback: Callable[..., Awaitable[Any]], + auth_callback: Callable[..., Any | Awaitable[Any]] | None = None, pool_management_endpoint: str | None = None, settings: SessionsPythonSettings | None = None, http_client: AsyncClient | None = None, env_file_path: str | None = None, + token_endpoint: str | None = None, **kwargs, ): """Initializes a new instance of the SessionsPythonTool class.""" try: aca_settings = ACASessionsSettings.create( - env_file_path=env_file_path, pool_management_endpoint=pool_management_endpoint + env_file_path=env_file_path, + pool_management_endpoint=pool_management_endpoint, + token_endpoint=token_endpoint, ) except ValidationError as e: logger.error(f"Failed to load the ACASessionsSettings with message: {e!s}") @@ -61,6 +65,9 @@ def __init__( if not http_client: http_client = AsyncClient() + if auth_callback is None: + auth_callback = self._default_auth_callback(aca_settings) + super().__init__( pool_management_endpoint=aca_settings.pool_management_endpoint, settings=settings, @@ -70,13 +77,29 @@ def __init__( ) # region Helper Methods + def _default_auth_callback(self, 
aca_settings: ACASessionsSettings) -> Callable[..., Any | Awaitable[Any]]: + """Generates a default authentication callback using the ACA settings.""" + token = aca_settings.get_sessions_auth_token() + + if token is None: + raise FunctionInitializationError("Failed to retrieve the client auth token.") + + def auth_callback() -> str: + """Retrieve the client auth token.""" + return token + + return auth_callback + async def _ensure_auth_token(self) -> str: - """Ensure the auth token is valid.""" + """Ensure the auth token is valid and handle both sync and async callbacks.""" try: - auth_token = await self.auth_callback() + if inspect.iscoroutinefunction(self.auth_callback): + auth_token = await self.auth_callback() + else: + auth_token = self.auth_callback() except Exception as e: logger.error(f"Failed to retrieve the client auth token with message: {e!s}") - raise FunctionExecutionException(f"Failed to retrieve the client auth token with messages: {e!s}") from e + raise FunctionExecutionException(f"Failed to retrieve the client auth token with message: {e!s}") from e return auth_token @@ -152,13 +175,11 @@ async def execute_code(self, code: Annotated[str, "The valid Python code to exec logger.info(f"Executing Python code: {code}") - self.http_client.headers.update( - { - "Authorization": f"Bearer {auth_token}", - "Content-Type": "application/json", - USER_AGENT: SESSIONS_USER_AGENT, - } - ) + self.http_client.headers.update({ + "Authorization": f"Bearer {auth_token}", + "Content-Type": "application/json", + USER_AGENT: SESSIONS_USER_AGENT, + }) self.settings.python_code = code @@ -213,12 +234,10 @@ async def upload_file( remote_file_path = self._construct_remote_file_path(remote_file_path or os.path.basename(local_file_path)) auth_token = await self._ensure_auth_token() - self.http_client.headers.update( - { - "Authorization": f"Bearer {auth_token}", - USER_AGENT: SESSIONS_USER_AGENT, - } - ) + self.http_client.headers.update({ + "Authorization": f"Bearer 
{auth_token}", + USER_AGENT: SESSIONS_USER_AGENT, + }) url = self._build_url_with_version( base_url=str(self.pool_management_endpoint), @@ -247,12 +266,10 @@ async def list_files(self) -> list[SessionsRemoteFileMetadata]: list[SessionsRemoteFileMetadata]: The metadata for the files in the session pool """ auth_token = await self._ensure_auth_token() - self.http_client.headers.update( - { - "Authorization": f"Bearer {auth_token}", - USER_AGENT: SESSIONS_USER_AGENT, - } - ) + self.http_client.headers.update({ + "Authorization": f"Bearer {auth_token}", + USER_AGENT: SESSIONS_USER_AGENT, + }) url = self._build_url_with_version( base_url=str(self.pool_management_endpoint), @@ -290,12 +307,10 @@ async def download_file( BufferedReader: The data of the downloaded file. """ auth_token = await self._ensure_auth_token() - self.http_client.headers.update( - { - "Authorization": f"Bearer {auth_token}", - USER_AGENT: SESSIONS_USER_AGENT, - } - ) + self.http_client.headers.update({ + "Authorization": f"Bearer {auth_token}", + USER_AGENT: SESSIONS_USER_AGENT, + }) url = self._build_url_with_version( base_url=str(self.pool_management_endpoint), diff --git a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py index c6bd6ee56aeb..d7e4195f49a5 100644 --- a/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py +++ b/python/semantic_kernel/core_plugins/sessions_python_tool/sessions_python_settings.py @@ -8,7 +8,9 @@ from pydantic import Field, field_validator +from semantic_kernel.exceptions.function_exceptions import PluginInitializationError from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseModel, KernelBaseSettings +from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token class CodeInputType(str, Enum): @@ -46,6 +48,7 @@ class ACASessionsSettings(KernelBaseSettings): env_prefix: 
ClassVar[str] = "ACA_" pool_management_endpoint: HttpsUrl + token_endpoint: str = "https://acasessions.io/.default" @field_validator("pool_management_endpoint", mode="before") @classmethod @@ -60,3 +63,25 @@ def _validate_endpoint(cls, endpoint: str) -> str: else: endpoint_parsed["path"] = "/" return str(urlunsplit(endpoint_parsed.values())) + + def get_sessions_auth_token(self, token_endpoint: str | None = None) -> str | None: + """Retrieve a Microsoft Entra Auth Token for a given token endpoint for use with an Azure Container App. + + The required role for the token is `Azure ContainerApps Session Executor and Contributor`. + The token endpoint may be specified as an environment variable, via the .env + file or as an argument. If the token endpoint is not provided, the default is None. + The `token_endpoint` argument takes precedence over the `token_endpoint` attribute. + + Args: + token_endpoint: The token endpoint to use. Defaults to `https://acasessions.io/.default`. + + Returns: + The Azure token or None if the token could not be retrieved. + + Raises: + PluginInitializationError: If the token endpoint is not provided. 
+ """ + endpoint_to_use = token_endpoint or self.token_endpoint + if endpoint_to_use is None: + raise PluginInitializationError("Please provide a token endpoint to retrieve the authentication token.") + return get_entra_auth_token(endpoint_to_use) diff --git a/python/semantic_kernel/functions/kernel_arguments.py b/python/semantic_kernel/functions/kernel_arguments.py index f6fa8060fe71..573b512d1f1d 100644 --- a/python/semantic_kernel/functions/kernel_arguments.py +++ b/python/semantic_kernel/functions/kernel_arguments.py @@ -43,3 +43,9 @@ def __init__( else: settings_dict = {settings.service_id or DEFAULT_SERVICE_NAME: settings} self.execution_settings: dict[str, "PromptExecutionSettings"] | None = settings_dict + + def __bool__(self) -> bool: + """Returns True if the arguments have any values.""" + has_arguments = self.__len__() > 0 + has_execution_settings = self.execution_settings is not None and len(self.execution_settings) > 0 + return has_arguments or has_execution_settings diff --git a/python/semantic_kernel/functions/kernel_function.py b/python/semantic_kernel/functions/kernel_function.py index 9b7f2a1eb317..b5d541a758ad 100644 --- a/python/semantic_kernel/functions/kernel_function.py +++ b/python/semantic_kernel/functions/kernel_function.py @@ -1,17 +1,22 @@ # Copyright (c) Microsoft. All rights reserved. 
import logging +import time from abc import abstractmethod from collections.abc import AsyncGenerator, Callable from copy import copy, deepcopy from inspect import isasyncgen, isgenerator from typing import TYPE_CHECKING, Any +from opentelemetry import metrics, trace +from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE + from semantic_kernel.filters.filter_types import FilterTypes from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext from semantic_kernel.filters.kernel_filters_extension import _rebuild_function_invocation_context from semantic_kernel.functions.function_result import FunctionResult from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_log_messages import KernelFunctionLogMessages from semantic_kernel.functions.kernel_function_metadata import KernelFunctionMetadata from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata from semantic_kernel.kernel_pydantic import KernelBaseModel @@ -34,8 +39,11 @@ from semantic_kernel.prompt_template.prompt_template_base import PromptTemplateBase from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig +# Logger, tracer and meter for observability logger: logging.Logger = logging.getLogger(__name__) - +tracer: trace.Tracer = trace.get_tracer(__name__) +meter: metrics.Meter = metrics.get_meter_provider().get_meter(__name__) +MEASUREMENT_FUNCTION_TAG_NAME: str = "semantic_kernel.function.name" TEMPLATE_FORMAT_MAP = { KERNEL_TEMPLATE_FORMAT_NAME: KernelPromptTemplate, @@ -67,6 +75,17 @@ class KernelFunction(KernelBaseModel): metadata: KernelFunctionMetadata + invocation_duration_histogram: metrics.Histogram = meter.create_histogram( + "semantic_kernel.function.invocation.duration", + unit="s", + description="Measures the duration of a function's execution", + ) + streaming_duration_histogram: metrics.Histogram = 
meter.create_histogram( + "semantic_kernel.function.streaming.duration", + unit="s", + description="Measures the duration of a function's streaming execution", + ) + @classmethod def from_prompt( cls, @@ -204,13 +223,30 @@ async def invoke( _rebuild_function_invocation_context() function_context = FunctionInvocationContext(function=self, kernel=kernel, arguments=arguments) - stack = kernel.construct_call_stack( - filter_type=FilterTypes.FUNCTION_INVOCATION, - inner_function=self._invoke_internal, - ) - await stack(function_context) - - return function_context.result + with tracer.start_as_current_span(self.fully_qualified_name) as current_span: + KernelFunctionLogMessages.log_function_invoking(logger, self.fully_qualified_name) + KernelFunctionLogMessages.log_function_arguments(logger, arguments) + + attributes = {MEASUREMENT_FUNCTION_TAG_NAME: self.fully_qualified_name} + starting_time_stamp = time.perf_counter() + try: + stack = kernel.construct_call_stack( + filter_type=FilterTypes.FUNCTION_INVOCATION, + inner_function=self._invoke_internal, + ) + await stack(function_context) + + KernelFunctionLogMessages.log_function_invoked_success(logger, self.fully_qualified_name) + KernelFunctionLogMessages.log_function_result_value(logger, function_context.result) + + return function_context.result + except Exception as e: + self._handle_exception(current_span, e, attributes) + raise e + finally: + duration = time.perf_counter() - starting_time_stamp + self.invocation_duration_histogram.record(duration, attributes) + KernelFunctionLogMessages.log_function_completed(logger, duration) @abstractmethod async def _invoke_internal_stream(self, context: FunctionInvocationContext) -> None: @@ -247,21 +283,35 @@ async def invoke_stream( _rebuild_function_invocation_context() function_context = FunctionInvocationContext(function=self, kernel=kernel, arguments=arguments) - stack = kernel.construct_call_stack( - filter_type=FilterTypes.FUNCTION_INVOCATION, - 
inner_function=self._invoke_internal_stream, - ) - await stack(function_context) - - if function_context.result is not None: - if isasyncgen(function_context.result.value): - async for partial in function_context.result.value: - yield partial - elif isgenerator(function_context.result.value): - for partial in function_context.result.value: - yield partial - else: - yield function_context.result + with tracer.start_as_current_span(self.fully_qualified_name) as current_span: + KernelFunctionLogMessages.log_function_streaming_invoking(logger, self.fully_qualified_name) + KernelFunctionLogMessages.log_function_arguments(logger, arguments) + + attributes = {MEASUREMENT_FUNCTION_TAG_NAME: self.fully_qualified_name} + starting_time_stamp = time.perf_counter() + try: + stack = kernel.construct_call_stack( + filter_type=FilterTypes.FUNCTION_INVOCATION, + inner_function=self._invoke_internal_stream, + ) + await stack(function_context) + + if function_context.result is not None: + if isasyncgen(function_context.result.value): + async for partial in function_context.result.value: + yield partial + elif isgenerator(function_context.result.value): + for partial in function_context.result.value: + yield partial + else: + yield function_context.result + except Exception as e: + self._handle_exception(current_span, e, attributes) + raise e + finally: + duration = time.perf_counter() - starting_time_stamp + self.streaming_duration_histogram.record(duration, attributes) + KernelFunctionLogMessages.log_function_streaming_completed(logger, duration) def function_copy(self, plugin_name: str | None = None) -> "KernelFunction": """Copy the function, can also override the plugin_name. @@ -277,3 +327,19 @@ def function_copy(self, plugin_name: str | None = None) -> "KernelFunction": if plugin_name: cop.metadata.plugin_name = plugin_name return cop + + def _handle_exception(self, current_span: trace.Span, exception: Exception, attributes: dict[str, str]) -> None: + """Handle the exception. 
+ + Args: + current_span (trace.Span): The current span. + exception (Exception): The exception. + attributes (Attributes): The attributes to be modified. + """ + attributes[ERROR_TYPE] = type(exception).__name__ + + current_span.record_exception(exception) + current_span.set_attribute(ERROR_TYPE, type(exception).__name__) + current_span.set_status(trace.StatusCode.ERROR, description=str(exception)) + + KernelFunctionLogMessages.log_function_error(logger, exception) diff --git a/python/semantic_kernel/functions/kernel_function_log_messages.py b/python/semantic_kernel/functions/kernel_function_log_messages.py new file mode 100644 index 000000000000..a41e7f9adcc4 --- /dev/null +++ b/python/semantic_kernel/functions/kernel_function_log_messages.py @@ -0,0 +1,63 @@ +# Copyright (c) Microsoft. All rights reserved. + +import logging +from logging import Logger + +from semantic_kernel.functions.function_result import FunctionResult +from semantic_kernel.functions.kernel_arguments import KernelArguments + + +class KernelFunctionLogMessages: + """Kernel function log messages. + + This class contains static methods to log messages related to kernel functions. 
+ """ + + @staticmethod + def log_function_invoking(logger: Logger, kernel_function_name: str): + """Log message when a kernel function is invoked.""" + logger.info("Function %s invoking.", kernel_function_name) + + @staticmethod + def log_function_arguments(logger: Logger, arguments: KernelArguments): + """Log the arguments with which a kernel function is invoked.""" + logger.debug("Function arguments: %s", arguments) + + @staticmethod + def log_function_invoked_success(logger: Logger, kernel_function_name: str): + """Log message when a kernel function is invoked successfully.""" + logger.info("Function %s succeeded.", kernel_function_name) + + @staticmethod + def log_function_result_value(logger: Logger, function_result: FunctionResult | None): + """Log message when a kernel function result is returned.""" + if not logger.isEnabledFor(logging.DEBUG): + return + + if function_result is not None: + try: + logger.debug("Function result: %s", function_result) + except Exception: + logger.error("Function result: Failed to convert result value to string") + else: + logger.debug("Function result: None") + + @staticmethod + def log_function_error(logger: Logger, error: Exception): + """Log message when a kernel function fails.""" + logger.error("Function failed. Error: %s", error) + + @staticmethod + def log_function_completed(logger: Logger, duration: float): + """Log message when a kernel function is completed.""" + logger.info("Function completed. Duration: %fs", duration) + + @staticmethod + def log_function_streaming_invoking(logger: Logger, kernel_function_name: str): + """Log message when a kernel function is invoked via streaming.""" + logger.info("Function %s streaming.", kernel_function_name) + + @staticmethod + def log_function_streaming_completed(logger: Logger, duration: float): + """Log message when a kernel function is completed via streaming.""" + logger.info("Function streaming completed. 
Duration: %fs", duration) diff --git a/python/semantic_kernel/functions/kernel_parameter_metadata.py b/python/semantic_kernel/functions/kernel_parameter_metadata.py index 20d6151bbb0b..7ddf115e7d88 100644 --- a/python/semantic_kernel/functions/kernel_parameter_metadata.py +++ b/python/semantic_kernel/functions/kernel_parameter_metadata.py @@ -36,13 +36,18 @@ def form_schema(cls, data: Any) -> Any: @classmethod def infer_schema( - cls, type_object: type | None, parameter_type: str | None, default_value: Any, description: str | None + cls, + type_object: type | None = None, + parameter_type: str | None = None, + default_value: Any | None = None, + description: str | None = None, + structured_output: bool = False, ) -> dict[str, Any] | None: """Infer the schema for the parameter metadata.""" schema = None if type_object is not None: - schema = KernelJsonSchemaBuilder.build(type_object, description) + schema = KernelJsonSchemaBuilder.build(type_object, description, structured_output) elif parameter_type is not None: string_default = str(default_value) if default_value is not None else None if string_default and string_default.strip(): diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py index 34540b0443e8..3fd5d33dcc1d 100644 --- a/python/semantic_kernel/kernel.py +++ b/python/semantic_kernel/kernel.py @@ -41,9 +41,7 @@ from semantic_kernel.utils.naming import generate_random_ascii_name if TYPE_CHECKING: - from semantic_kernel.connectors.ai.function_choice_behavior import ( - FunctionChoiceBehavior, - ) + from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.functions.kernel_function import KernelFunction @@ -239,7 +237,7 @@ async def invoke_prompt( Returns: FunctionResult | list[FunctionResult] | None: The result of the function(s) """ - if not arguments: + if arguments is None: arguments = 
KernelArguments(**kwargs) if not prompt: raise TemplateSyntaxError("The prompt is either null or empty.") @@ -280,7 +278,7 @@ async def invoke_prompt_stream( Returns: AsyncIterable[StreamingContentMixin]: The content of the stream of the last function provided. """ - if not arguments: + if arguments is None: arguments = KernelArguments(**kwargs) if not prompt: raise TemplateSyntaxError("The prompt is either null or empty.") diff --git a/python/semantic_kernel/schema/kernel_json_schema_builder.py b/python/semantic_kernel/schema/kernel_json_schema_builder.py index 438833fda6dc..9d19644fc25a 100644 --- a/python/semantic_kernel/schema/kernel_json_schema_builder.py +++ b/python/semantic_kernel/schema/kernel_json_schema_builder.py @@ -34,12 +34,15 @@ class KernelJsonSchemaBuilder: """Kernel JSON schema builder.""" @classmethod - def build(cls, parameter_type: type | str, description: str | None = None) -> dict[str, Any]: + def build( + cls, parameter_type: type | str | Any, description: str | None = None, structured_output: bool = False + ) -> dict[str, Any]: """Builds the JSON schema for a given parameter type and description. Args: - parameter_type (type | str): The parameter type. - description (str, optional): The description of the parameter. Defaults to None. + parameter_type: The parameter type. + description: The description of the parameter. Defaults to None. + structured_output: Whether the outputs are structured. Defaults to False. Returns: dict[str, Any]: The JSON schema for the parameter type. 
@@ -47,25 +50,28 @@ def build(cls, parameter_type: type | str, description: str | None = None) -> di if isinstance(parameter_type, str): return cls.build_from_type_name(parameter_type, description) if isinstance(parameter_type, KernelBaseModel): - return cls.build_model_schema(parameter_type, description) + return cls.build_model_schema(parameter_type, description, structured_output) if isinstance(parameter_type, type) and issubclass(parameter_type, Enum): return cls.build_enum_schema(parameter_type, description) if hasattr(parameter_type, "__annotations__"): - return cls.build_model_schema(parameter_type, description) + return cls.build_model_schema(parameter_type, description, structured_output) if hasattr(parameter_type, "__args__"): - return cls.handle_complex_type(parameter_type, description) + return cls.handle_complex_type(parameter_type, description, structured_output) schema = cls.get_json_schema(parameter_type) if description: schema["description"] = description return schema @classmethod - def build_model_schema(cls, model: type, description: str | None = None) -> dict[str, Any]: + def build_model_schema( + cls, model: type | KernelBaseModel, description: str | None = None, structured_output: bool = False + ) -> dict[str, Any]: """Builds the JSON schema for a given model and description. Args: - model (type): The model type. - description (str, optional): The description of the model. Defaults to None. + model: The model type. + description: The description of the model. Defaults to None. + structured_output: Whether the outputs are structured. Defaults to False. Returns: dict[str, Any]: The JSON schema for the model. 
@@ -88,11 +94,13 @@ def build_model_schema(cls, model: type, description: str | None = None) -> dict field_description = field_info.description if not cls._is_optional(field_type): required.append(field_name) - properties[field_name] = cls.build(field_type, field_description) + properties[field_name] = cls.build(field_type, field_description, structured_output) schema = {"type": "object", "properties": properties} if required: schema["required"] = required + if structured_output: + schema["additionalProperties"] = False # type: ignore if description: schema["description"] = description @@ -140,12 +148,15 @@ def get_json_schema(cls, parameter_type: type) -> dict[str, Any]: return {"type": type_name} @classmethod - def handle_complex_type(cls, parameter_type: type, description: str | None = None) -> dict[str, Any]: + def handle_complex_type( + cls, parameter_type: type, description: str | None = None, structured_output: bool = False + ) -> dict[str, Any]: """Handles building the JSON schema for complex types. Args: - parameter_type (type): The parameter type. - description (str, optional): The description of the parameter. Defaults to None. + parameter_type: The parameter type. + description: The description of the parameter. Defaults to None. + structured_output: Whether the outputs are structured. Defaults to False. Returns: dict[str, Any]: The JSON schema for the parameter type. 
@@ -156,39 +167,50 @@ def handle_complex_type(cls, parameter_type: type, description: str | None = Non schema: dict[str, Any] = {} if origin is list or origin is set: item_type = args[0] - schema = {"type": "array", "items": cls.build(item_type)} + schema = { + "type": "array", + "items": cls.build(item_type, structured_output=structured_output), + } if description: schema["description"] = description return schema if origin is dict: _, value_type = args - additional_properties = cls.build(value_type) + additional_properties = cls.build(value_type, structured_output=structured_output) if additional_properties == {"type": "object"}: additional_properties["properties"] = {} # Account for differences in Python 3.10 dict schema = {"type": "object", "additionalProperties": additional_properties} if description: schema["description"] = description + if structured_output: + schema["additionalProperties"] = False return schema if origin is tuple: - items = [cls.build(arg) for arg in args] + items = [cls.build(arg, structured_output=structured_output) for arg in args] schema = {"type": "array", "items": items} if description: schema["description"] = description + if structured_output: + schema["additionalProperties"] = False return schema if origin in {Union, types.UnionType}: # Handle Optional[T] (Union[T, None]) by making schema nullable if len(args) == 2 and type(None) in args: non_none_type = args[0] if args[1] is type(None) else args[1] - schema = cls.build(non_none_type) + schema = cls.build(non_none_type, structured_output=structured_output) schema["type"] = [schema["type"], "null"] if description: schema["description"] = description + if structured_output: + schema["additionalProperties"] = False return schema - schemas = [cls.build(arg, description) for arg in args] + schemas = [cls.build(arg, description, structured_output=structured_output) for arg in args] return {"anyOf": schemas} schema = cls.get_json_schema(parameter_type) if description: 
schema["description"] = description + if structured_output: + schema["additionalProperties"] = False return schema @classmethod diff --git a/python/semantic_kernel/services/ai_service_client_base.py b/python/semantic_kernel/services/ai_service_client_base.py index 2f3b1ff22fdb..69c12ea0b1ca 100644 --- a/python/semantic_kernel/services/ai_service_client_base.py +++ b/python/semantic_kernel/services/ai_service_client_base.py @@ -54,3 +54,11 @@ def get_prompt_execution_settings_from_settings( return settings return prompt_execution_settings_type.from_prompt_execution_settings(settings) + + def service_url(self) -> str | None: + """Get the URL of the service. + + Override this in the subclass to return the proper URL. + If the service does not have a URL, return None. + """ + return None diff --git a/python/semantic_kernel/utils/authentication/__init__.py b/python/semantic_kernel/utils/authentication/__init__.py new file mode 100644 index 000000000000..646090a8113b --- /dev/null +++ b/python/semantic_kernel/utils/authentication/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.utils.authentication.entra_id_authentication import get_entra_auth_token + +__all__ = ["get_entra_auth_token"] diff --git a/python/semantic_kernel/utils/authentication/entra_id_authentication.py b/python/semantic_kernel/utils/authentication/entra_id_authentication.py new file mode 100644 index 000000000000..d33255e87189 --- /dev/null +++ b/python/semantic_kernel/utils/authentication/entra_id_authentication.py @@ -0,0 +1,38 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import logging + +from azure.core.exceptions import ClientAuthenticationError +from azure.identity import DefaultAzureCredential + +from semantic_kernel.exceptions.service_exceptions import ServiceInvalidAuthError + +logger: logging.Logger = logging.getLogger(__name__) + + +def get_entra_auth_token(token_endpoint: str) -> str | None: + """Retrieve a Microsoft Entra Auth Token for a given token endpoint. + + The token endpoint may be specified as an environment variable, via the .env + file or as an argument. If the token endpoint is not provided, the default is None. + + Args: + token_endpoint: The token endpoint to use to retrieve the authentication token. + + Returns: + The Azure token or None if the token could not be retrieved. + """ + if not token_endpoint: + raise ServiceInvalidAuthError( + "A token endpoint must be provided either in settings, as an environment variable, or as an argument." + ) + + credential = DefaultAzureCredential() + + try: + auth_token = credential.get_token(token_endpoint) + except ClientAuthenticationError: + logger.error(f"Failed to retrieve Azure token for the specified endpoint: `{token_endpoint}`.") + return None + + return auth_token.token if auth_token else None diff --git a/python/semantic_kernel/utils/telemetry/decorators.py b/python/semantic_kernel/utils/telemetry/decorators.py deleted file mode 100644 index 366168ae3938..000000000000 --- a/python/semantic_kernel/utils/telemetry/decorators.py +++ /dev/null @@ -1,265 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. -# -# Code to trace model activities with the OTel semantic conventions. -# This code contains experimental features and may change in the future. 
-# To enable these features, set one of the following senvironment variables to true: -# SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS -# SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE - -import functools -import json -import os -from collections.abc import Callable -from typing import Any - -from opentelemetry.trace import Span, StatusCode, get_tracer, use_span - -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.utils.telemetry.const import ( - CHAT_COMPLETION_OPERATION, - COMPLETION_EVENT, - COMPLETION_EVENT_COMPLETION, - COMPLETION_TOKENS, - ERROR_TYPE, - FINISH_REASON, - MAX_TOKENS, - MODEL, - OPERATION, - PROMPT_EVENT, - PROMPT_EVENT_PROMPT, - PROMPT_TOKENS, - RESPONSE_ID, - SYSTEM, - TEMPERATURE, - TEXT_COMPLETION_OPERATION, - TOP_P, -) - -OTEL_ENABLED_ENV_VAR = "SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS" -OTEL_SENSITIVE_ENABLED_ENV_VAR = "SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE" - - -_enable_diagnostics = os.getenv(OTEL_ENABLED_ENV_VAR, "false").lower() in ("true", "1", "t") -_enable_sensitive_events = os.getenv(OTEL_SENSITIVE_ENABLED_ENV_VAR, "false").lower() in ("true", "1", "t") - -# Creates a tracer from the global tracer provider -tracer = get_tracer(__name__) - - -def are_model_diagnostics_enabled() -> bool: - """Check if model diagnostics are enabled. - - Model diagnostics are enabled if either _enable_diagnostics or _enable_sensitive_events is set. - """ - return _enable_diagnostics or _enable_sensitive_events - - -def are_sensitive_events_enabled() -> bool: - """Check if sensitive events are enabled. - - Sensitive events are enabled if _enable_sensitive_events is set. 
- """ - return _enable_sensitive_events - - -def trace_chat_completion(model_provider: str) -> Callable: - """Decorator to trace chat completion activities.""" - - def inner_trace_chat_completion(completion_func: Callable) -> Callable: - @functools.wraps(completion_func) - async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[ChatMessageContent]: - chat_history: ChatHistory = kwargs["chat_history"] - settings: PromptExecutionSettings = kwargs["settings"] - - model_name = getattr(settings, "ai_model_id", None) or getattr(args[0], "ai_model_id", None) or "unknown" - - formatted_messages = ( - _messages_to_openai_format(chat_history.messages) if are_sensitive_events_enabled() else None - ) - span = _start_completion_activity( - CHAT_COMPLETION_OPERATION, model_name, model_provider, formatted_messages, settings - ) - - try: - completions: list[ChatMessageContent] = await completion_func(*args, **kwargs) - except Exception as exception: - if span: - _set_completion_error(span, exception) - span.end() - raise - - if span and completions: - with use_span(span, end_on_exit=True): - first_completion = completions[0] - response_id = first_completion.metadata.get("id") or (first_completion.inner_content or {}).get( - "id" - ) - usage = first_completion.metadata.get("usage", None) - prompt_tokens = getattr(usage, "prompt_tokens", None) - completion_tokens = getattr(usage, "completion_tokens", None) - - completion_text: str | None = ( - _messages_to_openai_format(completions) if are_sensitive_events_enabled() else None - ) - - finish_reasons: list[str] = [str(completion.finish_reason) for completion in completions] - - _set_completion_response( - span, - completion_text, - finish_reasons, - response_id or "unknown", - prompt_tokens, - completion_tokens, - ) - - return completions - - return wrapper_decorator - - return inner_trace_chat_completion - - -def trace_text_completion(model_provider: str) -> Callable: - """Decorator to trace text completion activities.""" - - 
def inner_trace_text_completion(completion_func: Callable) -> Callable: - @functools.wraps(completion_func) - async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[TextContent]: - prompt: str = kwargs["prompt"] - settings: PromptExecutionSettings = kwargs["settings"] - - model_name = getattr(settings, "ai_model_id", None) or getattr(args[0], "ai_model_id", None) or "unknown" - - span = _start_completion_activity(TEXT_COMPLETION_OPERATION, model_name, model_provider, prompt, settings) - - try: - completions: list[TextContent] = await completion_func(*args, **kwargs) - except Exception as exception: - if span: - _set_completion_error(span, exception) - span.end() - raise - - if span and completions: - with use_span(span, end_on_exit=True): - first_completion = completions[0] - response_id = first_completion.metadata.get("id") or (first_completion.inner_content or {}).get( - "id" - ) - usage = first_completion.metadata.get("usage", None) - prompt_tokens = getattr(usage, "prompt_tokens", None) - completion_tokens = getattr(usage, "completion_tokens", None) - - completion_text: str | None = ( - json.dumps([completion.text for completion in completions]) - if are_sensitive_events_enabled() - else None - ) - - _set_completion_response( - span, - completion_text, - None, - response_id or "unknown", - prompt_tokens, - completion_tokens, - ) - - return completions - - return wrapper_decorator - - return inner_trace_text_completion - - -def _start_completion_activity( - operation_name: str, - model_name: str, - model_provider: str, - prompt: str | None, - execution_settings: PromptExecutionSettings | None, -) -> Span | None: - """Start a text or chat completion activity for a given model.""" - if not are_model_diagnostics_enabled(): - return None - - span = tracer.start_span(f"{operation_name} {model_name}") - - # Set attributes on the span - span.set_attributes( - { - OPERATION: operation_name, - SYSTEM: model_provider, - MODEL: model_name, - } - ) - - # 
TODO(@glahaye): we'll need to have a way to get these attributes from model - # providers other than OpenAI (for example if the attributes are named differently) - if execution_settings: - attribute = execution_settings.extension_data.get("max_tokens") - if attribute: - span.set_attribute(MAX_TOKENS, attribute) - - attribute = execution_settings.extension_data.get("temperature") - if attribute: - span.set_attribute(TEMPERATURE, attribute) - - attribute = execution_settings.extension_data.get("top_p") - if attribute: - span.set_attribute(TOP_P, attribute) - - if are_sensitive_events_enabled() and prompt: - span.add_event(PROMPT_EVENT, {PROMPT_EVENT_PROMPT: prompt}) - - return span - - -def _set_completion_response( - span: Span, - completion_text: str | None, - finish_reasons: list[str] | None, - response_id: str, - prompt_tokens: int | None = None, - completion_tokens: int | None = None, -) -> None: - """Set the a text or chat completion response for a given activity.""" - if not are_model_diagnostics_enabled(): - return - - span.set_attribute(RESPONSE_ID, response_id) - - if finish_reasons: - span.set_attribute(FINISH_REASON, ",".join(finish_reasons)) - - if prompt_tokens: - span.set_attribute(PROMPT_TOKENS, prompt_tokens) - - if completion_tokens: - span.set_attribute(COMPLETION_TOKENS, completion_tokens) - - if are_sensitive_events_enabled() and completion_text: - span.add_event(COMPLETION_EVENT, {COMPLETION_EVENT_COMPLETION: completion_text}) - - -def _set_completion_error(span: Span, error: Exception) -> None: - """Set an error for a text or chat completion .""" - if not are_model_diagnostics_enabled(): - return - - span.set_attribute(ERROR_TYPE, str(type(error))) - - span.set_status(StatusCode.ERROR, repr(error)) - - -def _messages_to_openai_format(messages: list[ChatMessageContent]) -> str: - """Convert a list of ChatMessageContent to a string in the OpenAI format. 
- - OpenTelemetry recommends formatting the messages in the OpenAI format - regardless of the actual model being used. - """ - return json.dumps([message.to_dict() for message in messages]) diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/__init__.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/__init__.py new file mode 100644 index 000000000000..52d1e7614768 --- /dev/null +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + trace_chat_completion, + trace_streaming_chat_completion, + trace_streaming_text_completion, + trace_text_completion, +) + +__all__ = [ + "trace_chat_completion", + "trace_streaming_chat_completion", + "trace_streaming_text_completion", + "trace_text_completion", +] diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py new file mode 100644 index 000000000000..5a0dda7cfca9 --- /dev/null +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/decorators.py @@ -0,0 +1,402 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import functools +import json +from collections.abc import AsyncGenerator, Callable +from functools import reduce +from typing import TYPE_CHECKING, Any + +from opentelemetry.trace import Span, StatusCode, get_tracer, use_span + +from semantic_kernel.connectors.ai.completion_usage import CompletionUsage +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.utils.experimental_decorator import experimental_function +from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes +from semantic_kernel.utils.telemetry.model_diagnostics.model_diagnostics_settings import ModelDiagnosticSettings + +if TYPE_CHECKING: + from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase + from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings + from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase + +# Module to instrument GenAI models using OpenTelemetry and OpenTelemetry Semantic Conventions. +# These are experimental features and may change in the future. 
+ +# To enable these features, set one of the following environment variables to true: +# SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS +# SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE +MODEL_DIAGNOSTICS_SETTINGS = ModelDiagnosticSettings.create() + +# Operation names +CHAT_COMPLETION_OPERATION = "chat.completions" +CHAT_STREAMING_COMPLETION_OPERATION = "chat.streaming_completions" +TEXT_COMPLETION_OPERATION = "text.completions" +TEXT_STREAMING_COMPLETION_OPERATION = "text.streaming_completions" + +# Creates a tracer from the global tracer provider +tracer = get_tracer(__name__) + + +@experimental_function +def are_model_diagnostics_enabled() -> bool: + """Check if model diagnostics are enabled. + + Model diagnostics are enabled if either diagnostic is enabled or diagnostic with sensitive events is enabled. + """ + return ( + MODEL_DIAGNOSTICS_SETTINGS.enable_otel_diagnostics + or MODEL_DIAGNOSTICS_SETTINGS.enable_otel_diagnostics_sensitive + ) + + +@experimental_function +def are_sensitive_events_enabled() -> bool: + """Check if sensitive events are enabled. + + Sensitive events are enabled if the diagnostic with sensitive events is enabled. + """ + return MODEL_DIAGNOSTICS_SETTINGS.enable_otel_diagnostics_sensitive + + +@experimental_function +def trace_chat_completion(model_provider: str) -> Callable: + """Decorator to trace chat completion activities. + + Args: + model_provider (str): The model provider should describe a family of + GenAI models with specific model identified by ai_model_id. For example, + model_provider could be "openai" and ai_model_id could be "gpt-3.5-turbo". + Sometimes the model provider is unknown at runtime, in which case it can be + set to the most specific known provider. For example, while using local models + hosted by Ollama, the model provider could be set to "ollama". 
+ """ + + def inner_trace_chat_completion(completion_func: Callable) -> Callable: + @functools.wraps(completion_func) + async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[ChatMessageContent]: + if not are_model_diagnostics_enabled(): + # If model diagnostics are not enabled, just return the completion + return await completion_func(*args, **kwargs) + + completion_service: "ChatCompletionClientBase" = args[0] + chat_history: ChatHistory = ( + kwargs.get("chat_history") if kwargs.get("chat_history") is not None else args[1] + ) + settings: "PromptExecutionSettings" = ( + kwargs.get("settings") if kwargs.get("settings") is not None else args[2] + ) + + with use_span( + _start_completion_activity( + CHAT_COMPLETION_OPERATION, + completion_service.ai_model_id, + model_provider, + completion_service.service_url(), + chat_history, + settings, + ), + end_on_exit=True, + ) as current_span: + try: + completions: list[ChatMessageContent] = await completion_func(*args, **kwargs) + _set_completion_response(current_span, completions) + return completions + except Exception as exception: + _set_completion_error(current_span, exception) + raise + + # Mark the wrapper decorator as a chat completion decorator + wrapper_decorator.__model_diagnostics_chat_completion__ = True # type: ignore + + return wrapper_decorator + + return inner_trace_chat_completion + + +@experimental_function +def trace_streaming_chat_completion(model_provider: str) -> Callable: + """Decorator to trace streaming chat completion activities. + + Args: + model_provider (str): The model provider should describe a family of + GenAI models with specific model identified by ai_model_id. For example, + model_provider could be "openai" and ai_model_id could be "gpt-3.5-turbo". + Sometimes the model provider is unknown at runtime, in which case it can be + set to the most specific known provider. For example, while using local models + hosted by Ollama, the model provider could be set to "ollama". 
+ """ + + def inner_trace_streaming_chat_completion(completion_func: Callable) -> Callable: + @functools.wraps(completion_func) + async def wrapper_decorator( + *args: Any, **kwargs: Any + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + if not are_model_diagnostics_enabled(): + # If model diagnostics are not enabled, just return the completion + async for streaming_chat_message_contents in completion_func(*args, **kwargs): + yield streaming_chat_message_contents + return + + completion_service: "ChatCompletionClientBase" = args[0] + chat_history: ChatHistory = ( + kwargs.get("chat_history") if kwargs.get("chat_history") is not None else args[1] + ) + settings: "PromptExecutionSettings" = ( + kwargs.get("settings") if kwargs.get("settings") is not None else args[2] + ) + + all_messages: dict[int, list[StreamingChatMessageContent]] = {} + + with use_span( + _start_completion_activity( + CHAT_STREAMING_COMPLETION_OPERATION, + completion_service.ai_model_id, + model_provider, + completion_service.service_url(), + chat_history, + settings, + ), + end_on_exit=True, + ) as current_span: + try: + async for streaming_chat_message_contents in completion_func(*args, **kwargs): + for streaming_chat_message_content in streaming_chat_message_contents: + choice_index = streaming_chat_message_content.choice_index + if choice_index not in all_messages: + all_messages[choice_index] = [] + all_messages[choice_index].append(streaming_chat_message_content) + yield streaming_chat_message_contents + + all_messages_flattened = [ + reduce(lambda x, y: x + y, messages) for messages in all_messages.values() + ] + _set_completion_response(current_span, all_messages_flattened) + except Exception as exception: + _set_completion_error(current_span, exception) + raise + + # Mark the wrapper decorator as a streaming chat completion decorator + wrapper_decorator.__model_diagnostics_streaming_chat_completion__ = True # type: ignore + return wrapper_decorator + + return 
inner_trace_streaming_chat_completion + + +@experimental_function +def trace_text_completion(model_provider: str) -> Callable: + """Decorator to trace text completion activities. + + Args: + model_provider (str): The model provider should describe a family of + GenAI models with specific model identified by ai_model_id. For example, + model_provider could be "openai" and ai_model_id could be "gpt-3.5-turbo". + Sometimes the model provider is unknown at runtime, in which case it can be + set to the most specific known provider. For example, while using local models + hosted by Ollama, the model provider could be set to "ollama". + """ + + def inner_trace_text_completion(completion_func: Callable) -> Callable: + @functools.wraps(completion_func) + async def wrapper_decorator(*args: Any, **kwargs: Any) -> list[TextContent]: + if not are_model_diagnostics_enabled(): + # If model diagnostics are not enabled, just return the completion + return await completion_func(*args, **kwargs) + + completion_service: "TextCompletionClientBase" = args[0] + prompt: str = kwargs.get("prompt") if kwargs.get("prompt") is not None else args[1] + settings: "PromptExecutionSettings" = kwargs["settings"] if kwargs.get("settings") is not None else args[2] + + with use_span( + _start_completion_activity( + TEXT_COMPLETION_OPERATION, + completion_service.ai_model_id, + model_provider, + completion_service.service_url(), + prompt, + settings, + ), + end_on_exit=True, + ) as current_span: + try: + completions: list[TextContent] = await completion_func(*args, **kwargs) + _set_completion_response(current_span, completions) + return completions + except Exception as exception: + _set_completion_error(current_span, exception) + raise + + # Mark the wrapper decorator as a text completion decorator + wrapper_decorator.__model_diagnostics_text_completion__ = True # type: ignore + + return wrapper_decorator + + return inner_trace_text_completion + + +@experimental_function +def 
trace_streaming_text_completion(model_provider: str) -> Callable: + """Decorator to trace streaming text completion activities. + + Args: + model_provider (str): The model provider should describe a family of + GenAI models with specific model identified by ai_model_id. For example, + model_provider could be "openai" and ai_model_id could be "gpt-3.5-turbo". + Sometimes the model provider is unknown at runtime, in which case it can be + set to the most specific known provider. For example, while using local models + hosted by Ollama, the model provider could be set to "ollama". + """ + + def inner_trace_streaming_text_completion(completion_func: Callable) -> Callable: + @functools.wraps(completion_func) + async def wrapper_decorator(*args: Any, **kwargs: Any) -> AsyncGenerator[list["StreamingTextContent"], Any]: + if not are_model_diagnostics_enabled(): + # If model diagnostics are not enabled, just return the completion + async for streaming_text_contents in completion_func(*args, **kwargs): + yield streaming_text_contents + return + + completion_service: "TextCompletionClientBase" = args[0] + prompt: str = kwargs.get("prompt") if kwargs.get("prompt") is not None else args[1] + settings: "PromptExecutionSettings" = kwargs["settings"] if kwargs.get("settings") is not None else args[2] + + all_text_contents: dict[int, list["StreamingTextContent"]] = {} + + with use_span( + _start_completion_activity( + TEXT_STREAMING_COMPLETION_OPERATION, + completion_service.ai_model_id, + model_provider, + completion_service.service_url(), + prompt, + settings, + ), + end_on_exit=True, + ) as current_span: + try: + async for streaming_text_contents in completion_func(*args, **kwargs): + for streaming_text_content in streaming_text_contents: + choice_index = streaming_text_content.choice_index + if choice_index not in all_text_contents: + all_text_contents[choice_index] = [] + all_text_contents[choice_index].append(streaming_text_content) + yield streaming_text_contents + + 
all_text_contents_flattened = [ + reduce(lambda x, y: x + y, messages) for messages in all_text_contents.values() + ] + _set_completion_response(current_span, all_text_contents_flattened) + except Exception as exception: + _set_completion_error(current_span, exception) + raise + + # Mark the wrapper decorator as a streaming text completion decorator + wrapper_decorator.__model_diagnostics_streaming_text_completion__ = True # type: ignore + return wrapper_decorator + + return inner_trace_streaming_text_completion + + +def _start_completion_activity( + operation_name: str, + model_name: str, + model_provider: str, + service_url: str | None, + prompt: str | ChatHistory, + execution_settings: "PromptExecutionSettings | None", +) -> Span: + """Start a text or chat completion activity for a given model.""" + span = tracer.start_span(f"{operation_name} {model_name}") + + # Set attributes on the span + span.set_attributes({ + gen_ai_attributes.OPERATION: operation_name, + gen_ai_attributes.SYSTEM: model_provider, + gen_ai_attributes.MODEL: model_name, + }) + + if service_url: + span.set_attribute(gen_ai_attributes.ADDRESS, service_url) + + # TODO(@glahaye): we'll need to have a way to get these attributes from model + # providers other than OpenAI (for example if the attributes are named differently) + if execution_settings: + attribute = execution_settings.extension_data.get("max_tokens") + if attribute: + span.set_attribute(gen_ai_attributes.MAX_TOKENS, attribute) + + attribute = execution_settings.extension_data.get("temperature") + if attribute: + span.set_attribute(gen_ai_attributes.TEMPERATURE, attribute) + + attribute = execution_settings.extension_data.get("top_p") + if attribute: + span.set_attribute(gen_ai_attributes.TOP_P, attribute) + + if are_sensitive_events_enabled(): + if isinstance(prompt, ChatHistory): + prompt = _messages_to_openai_format(prompt.messages) + span.add_event(gen_ai_attributes.PROMPT_EVENT, {gen_ai_attributes.PROMPT_EVENT_PROMPT: prompt}) + 
+ return span + + +def _set_completion_response( + current_span: Span, + completions: list[ChatMessageContent] + | list[TextContent] + | list[StreamingChatMessageContent] + | list[StreamingTextContent], +) -> None: + """Set the a text or chat completion response for a given activity.""" + first_completion = completions[0] + + # Set the response ID + response_id = first_completion.metadata.get("id") + if response_id: + current_span.set_attribute(gen_ai_attributes.RESPONSE_ID, response_id) + + # Set the finish reason + finish_reasons = [ + str(completion.finish_reason) for completion in completions if isinstance(completion, ChatMessageContent) + ] + if finish_reasons: + current_span.set_attribute(gen_ai_attributes.FINISH_REASON, ",".join(finish_reasons)) + + # Set usage attributes + usage = first_completion.metadata.get("usage", None) + if isinstance(usage, CompletionUsage): + if usage.prompt_tokens: + current_span.set_attribute(gen_ai_attributes.PROMPT_TOKENS, usage.prompt_tokens) + if usage.completion_tokens: + current_span.set_attribute(gen_ai_attributes.COMPLETION_TOKENS, usage.completion_tokens) + + # Set the completion event + if are_sensitive_events_enabled(): + completion_text: str = _messages_to_openai_format(completions) + current_span.add_event( + gen_ai_attributes.COMPLETION_EVENT, {gen_ai_attributes.COMPLETION_EVENT_COMPLETION: completion_text} + ) + + +def _set_completion_error(span: Span, error: Exception) -> None: + """Set an error for a text or chat completion .""" + span.set_attribute(gen_ai_attributes.ERROR_TYPE, str(type(error))) + span.set_status(StatusCode.ERROR, repr(error)) + + +def _messages_to_openai_format( + messages: list[ChatMessageContent] + | list[StreamingChatMessageContent] + | list[TextContent] + | list[StreamingTextContent], +) -> str: + """Convert a list of ChatMessageContent to a string in the OpenAI format. + + OpenTelemetry recommends formatting the messages in the OpenAI format + regardless of the actual model being used. 
+ """ + return json.dumps([message.to_dict() for message in messages]) diff --git a/python/semantic_kernel/utils/telemetry/const.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/gen_ai_attributes.py similarity index 77% rename from python/semantic_kernel/utils/telemetry/const.py rename to python/semantic_kernel/utils/telemetry/model_diagnostics/gen_ai_attributes.py index 5c74f708b986..461500a998cb 100644 --- a/python/semantic_kernel/utils/telemetry/const.py +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/gen_ai_attributes.py @@ -1,12 +1,13 @@ # Copyright (c) Microsoft. All rights reserved. -# + # Constants for tracing activities with semantic conventions. +# Ideally, we should use the attributes from the semcov package. +# However, many of the attributes are not yet available in the package, +# so we define them here for now. # Activity tags SYSTEM = "gen_ai.system" OPERATION = "gen_ai.operation.name" -CHAT_COMPLETION_OPERATION = "chat.completions" -TEXT_COMPLETION_OPERATION = "text.completions" MODEL = "gen_ai.request.model" MAX_TOKENS = "gen_ai.request.max_tokens" # nosec TEMPERATURE = "gen_ai.request.temperature" @@ -26,3 +27,6 @@ # Activity event attributes PROMPT_EVENT_PROMPT = "gen_ai.prompt" COMPLETION_EVENT_COMPLETION = "gen_ai.completion" + +# Kernel specific attributes +AVAILABLE_FUNCTIONS = "sk.available_functions" diff --git a/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py new file mode 100644 index 000000000000..f7e509a21b26 --- /dev/null +++ b/python/semantic_kernel/utils/telemetry/model_diagnostics/model_diagnostics_settings.py @@ -0,0 +1,31 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import ClassVar + +from semantic_kernel.kernel_pydantic import KernelBaseSettings +from semantic_kernel.utils.experimental_decorator import experimental_class + + +@experimental_class +class ModelDiagnosticSettings(KernelBaseSettings): + """Settings for model diagnostics. + + The settings are first loaded from environment variables with + the prefix 'AZURE_AI_INFERENCE_'. + If the environment variables are not found, the settings can + be loaded from a .env file with the encoding 'utf-8'. + If the settings are not found in the .env file, the settings + are ignored; however, validation will fail alerting that the + settings are missing. + + Required settings for prefix 'SEMANTICKERNEL_EXPERIMENTAL_GENAI_' are: + - enable_otel_diagnostics: bool - Enable OpenTelemetry diagnostics. Default is False. + (Env var SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS) + - enable_otel_diagnostics_sensitive: bool - Enable OpenTelemetry sensitive events. Default is False. + (Env var SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE) + """ + + env_prefix: ClassVar[str] = "SEMANTICKERNEL_EXPERIMENTAL_GENAI_" + + enable_otel_diagnostics: bool = False + enable_otel_diagnostics_sensitive: bool = False diff --git a/python/setup_dev.sh b/python/setup_dev.sh deleted file mode 100644 index 98a642d3953b..000000000000 --- a/python/setup_dev.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -# this assumes Poetry is installed and in the Path, see https://python-poetry.org/docs/#installing-with-the-official-installer -# on macos run with `source ./setup_dev.sh` -poetry install -poetry run pre-commit install -poetry run pre-commit autoupdate diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 692c9c759ab1..19173f86b102 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -161,6 +161,11 @@ def chat_history() -> "ChatHistory": return ChatHistory() +@fixture(scope="function") +def prompt() -> str: + return "test prompt" + 
+ # @fixture(autouse=True) # def enable_debug_mode(): # """Set `autouse=True` to enable easy debugging for tests. @@ -229,6 +234,7 @@ def azure_openai_unit_test_env(monkeypatch, exclude_list, override_env_param_dic "AZURE_OPENAI_ENDPOINT": "https://test-endpoint.com", "AZURE_OPENAI_API_VERSION": "2023-03-15-preview", "AZURE_OPENAI_BASE_URL": "https://test_text_deployment.test-base-url.com", + "AZURE_OPENAI_TOKEN_ENDPOINT": "https://test-token-endpoint.com", } env_vars.update(override_env_param_dict) @@ -306,10 +312,7 @@ def anthropic_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): if override_env_param_dict is None: override_env_param_dict = {} - env_vars = { - "ANTHROPIC_CHAT_MODEL_ID": "test_chat_model_id", - "ANTHROPIC_API_KEY": "test_api_key" - } + env_vars = {"ANTHROPIC_CHAT_MODEL_ID": "test_chat_model_id", "ANTHROPIC_API_KEY": "test_api_key"} env_vars.update(override_env_param_dict) @@ -422,6 +425,28 @@ def google_search_unit_test_env(monkeypatch, exclude_list, override_env_param_di return env_vars +@fixture +def postgres_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): + """Fixture to set environment variables for Postgres connector.""" + if exclude_list is None: + exclude_list = [] + + if override_env_param_dict is None: + override_env_param_dict = {} + + env_vars = {"POSTGRES_CONNECTION_STRING": "host=localhost port=5432 dbname=postgres user=testuser password=example"} + + env_vars.update(override_env_param_dict) + + for key, value in env_vars.items(): + if key not in exclude_list: + monkeypatch.setenv(key, value) + else: + monkeypatch.delenv(key, raising=False) + + return env_vars + + @fixture def qdrant_unit_test_env(monkeypatch, exclude_list, override_env_param_dict): """Fixture to set environment variables for QdrantConnector.""" diff --git a/python/tests/integration/completions/chat_completion_test_base.py b/python/tests/integration/completions/chat_completion_test_base.py index 614928720301..d222cee41ca8 100644 
--- a/python/tests/integration/completions/chat_completion_test_base.py +++ b/python/tests/integration/completions/chat_completion_test_base.py @@ -1,16 +1,16 @@ # Copyright (c) Microsoft. All rights reserved. -import os import sys from functools import reduce -from typing import Any +from typing import Annotated, Any import pytest from azure.ai.inference.aio import ChatCompletionsClient -from azure.core.credentials import AzureKeyCredential +from azure.identity import DefaultAzureCredential from openai import AsyncAzureOpenAI +from semantic_kernel.connectors.ai.anthropic import AnthropicChatCompletion from semantic_kernel.connectors.ai.azure_ai_inference.azure_ai_inference_prompt_execution_settings import ( AzureAIInferenceChatPromptExecutionSettings, ) @@ -32,6 +32,7 @@ from semantic_kernel.connectors.ai.mistral_ai.services.mistral_ai_chat_completion import MistralAIChatCompletion from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import OllamaChatPromptExecutionSettings from semantic_kernel.connectors.ai.ollama.services.ollama_chat_completion import OllamaChatCompletion +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( AzureChatPromptExecutionSettings, ) @@ -44,7 +45,10 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.core_plugins.math_plugin import MathPlugin +from semantic_kernel.exceptions import ServiceInitializationError +from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.kernel import Kernel +from semantic_kernel.kernel_pydantic import KernelBaseModel from tests.integration.completions.completion_test_base import CompletionTestBase, ServiceType if sys.version_info >= (3, 12): @@ -54,18 +58,54 @@ 
mistral_ai_setup: bool = False try: - if os.environ["MISTRALAI_API_KEY"] and os.environ["MISTRALAI_CHAT_MODEL_ID"]: - mistral_ai_setup = True -except KeyError: + MistralAIChatCompletion() + mistral_ai_setup = True +except ServiceInitializationError: mistral_ai_setup = False ollama_setup: bool = False try: - if os.environ["OLLAMA_MODEL"]: - ollama_setup = True -except KeyError: + OllamaChatCompletion() + ollama_setup = True +except ServiceInitializationError: ollama_setup = False +google_ai_setup: bool = False +try: + GoogleAIChatCompletion() + google_ai_setup = True +except ServiceInitializationError: + google_ai_setup = False + +vertex_ai_setup: bool = False +try: + VertexAIChatCompletion() + vertex_ai_setup = True +except ServiceInitializationError: + vertex_ai_setup = False + +anthropic_setup: bool = False +try: + AnthropicChatCompletion() + anthropic_setup = True +except ServiceInitializationError: + anthropic_setup = False + + +# A mock plugin that contains a function that returns a complex object. 
+class PersonDetails(KernelBaseModel): + id: str + name: str + age: int + + +class PersonSearchPlugin: + @kernel_function(name="SearchPerson", description="Search details of a person given their id.") + def search_person( + self, person_id: Annotated[str, "The person ID to search"] + ) -> Annotated[PersonDetails, "The details of the person"]: + return PersonDetails(id=person_id, name="John Doe", age=42) + class ChatCompletionTestBase(CompletionTestBase): """Base class for testing completion services.""" @@ -76,13 +116,13 @@ def services(self) -> dict[str, tuple[ServiceType, type[PromptExecutionSettings] azure_openai_settings = AzureOpenAISettings.create() endpoint = azure_openai_settings.endpoint deployment_name = azure_openai_settings.chat_deployment_name - api_key = azure_openai_settings.api_key.get_secret_value() + ad_token = azure_openai_settings.get_azure_openai_auth_token() api_version = azure_openai_settings.api_version azure_custom_client = AzureChatCompletion( async_client=AsyncAzureOpenAI( azure_endpoint=endpoint, azure_deployment=deployment_name, - api_key=api_key, + azure_ad_token=ad_token, api_version=api_version, default_headers={"Test-User-X-ID": "test"}, ), @@ -91,8 +131,9 @@ def services(self) -> dict[str, tuple[ServiceType, type[PromptExecutionSettings] ai_model_id=deployment_name, client=ChatCompletionsClient( endpoint=f'{str(endpoint).strip("/")}/openai/deployments/{deployment_name}', - credential=AzureKeyCredential(""), - headers={"api-key": api_key}, + credential=DefaultAzureCredential(), + credential_scopes=["https://cognitiveservices.azure.com/.default"], + api_version=DEFAULT_AZURE_API_VERSION, ), ) @@ -106,13 +147,14 @@ def services(self) -> dict[str, tuple[ServiceType, type[PromptExecutionSettings] MistralAIChatPromptExecutionSettings, ), "ollama": (OllamaChatCompletion() if ollama_setup else None, OllamaChatPromptExecutionSettings), - "google_ai": (GoogleAIChatCompletion(), GoogleAIChatPromptExecutionSettings), - "vertex_ai": 
(VertexAIChatCompletion(), VertexAIChatPromptExecutionSettings), + "google_ai": (GoogleAIChatCompletion() if google_ai_setup else None, GoogleAIChatPromptExecutionSettings), + "vertex_ai": (VertexAIChatCompletion() if vertex_ai_setup else None, VertexAIChatPromptExecutionSettings), } def setup(self, kernel: Kernel): """Setup the kernel with the completion service and function.""" kernel.add_plugin(MathPlugin(), plugin_name="math") + kernel.add_plugin(PersonSearchPlugin(), plugin_name="search") async def get_chat_completion_response( self, diff --git a/python/tests/integration/completions/test_chat_completion_with_function_calling.py b/python/tests/integration/completions/test_chat_completion_with_function_calling.py index 47cdfaa7294f..4ba364bef474 100644 --- a/python/tests/integration/completions/test_chat_completion_with_function_calling.py +++ b/python/tests/integration/completions/test_chat_completion_with_function_calling.py @@ -16,7 +16,13 @@ from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.kernel import Kernel -from tests.integration.completions.chat_completion_test_base import ChatCompletionTestBase +from tests.integration.completions.chat_completion_test_base import ( + ChatCompletionTestBase, + anthropic_setup, + google_ai_setup, + mistral_ai_setup, + vertex_ai_setup, +) from tests.integration.completions.completion_test_base import ServiceType from tests.integration.completions.test_utils import retry @@ -117,6 +123,24 @@ class FunctionChoiceTestTypes(str, Enum): {"test_type": FunctionChoiceTestTypes.FLOW}, id="openai_tool_call_flow", ), + pytest.param( + "openai", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="Find the person whose id is 9b3f6e40.")], + ), + ] + ], + {"test_type": 
FunctionChoiceTestTypes.AUTO}, + id="openai_tool_call_auto_complex_return_type", + ), pytest.param( "azure", {"function_choice_behavior": FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["task_plugin"]})}, @@ -192,6 +216,24 @@ class FunctionChoiceTestTypes(str, Enum): {"test_type": FunctionChoiceTestTypes.FLOW}, id="azure_tool_call_flow", ), + pytest.param( + "azure", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="Find the person whose id is 9b3f6e40.")], + ), + ] + ], + {"test_type": FunctionChoiceTestTypes.AUTO}, + id="azure_tool_call_auto_complex_return_type", + ), pytest.param( "azure_ai_inference", { @@ -261,6 +303,164 @@ class FunctionChoiceTestTypes(str, Enum): {"test_type": FunctionChoiceTestTypes.FLOW}, id="azure_ai_inference_tool_call_flow", ), + pytest.param( + "azure_ai_inference", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="Find the person whose id is 9b3f6e40.")], + ), + ] + ], + {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skip( + reason="Possible regression on the Azure AI Inference side when" + " returning tool calls in streaming responses. Investigating..." + ), + id="azure_ai_inference_tool_call_auto_complex_return_type", + ), + pytest.param( + "mistral_ai", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ), + "max_tokens": 256, + }, + [ + [ + ChatMessageContent( + role=AuthorRole.SYSTEM, + items=[TextContent(text="You're very bad at math. 
Don't attempt to do it yourself.")], + ), + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="What is 345 + 3?")]), + ] + ], + {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not mistral_ai_setup, reason="Mistral AI Environment Variables not set"), + id="mistral_ai_tool_call_auto", + ), + pytest.param( + "mistral_ai", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=False, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.SYSTEM, + items=[TextContent(text="You're very bad at math. Don't attempt to do it yourself.")], + ), + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="What is 345 + 3?")]), + ] + ], + {"test_type": FunctionChoiceTestTypes.NON_AUTO}, + marks=pytest.mark.skipif(not mistral_ai_setup, reason="Mistral AI Environment Variables not set"), + id="mistral_ai_tool_call_non_auto", + ), + pytest.param( + "mistral_ai", + {}, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="What was our 2024 revenue?")], + ), + ChatMessageContent( + role=AuthorRole.ASSISTANT, + items=[ + FunctionCallContent( + id="123456789", name="finance-search", arguments='{"company": "contoso", "year": 2024}' + ) + ], + ), + ChatMessageContent( + role=AuthorRole.TOOL, + items=[FunctionResultContent(id="123456789", name="finance-search", result="1.2B")], + ), + ], + ], + {"test_type": FunctionChoiceTestTypes.FLOW}, + marks=pytest.mark.skipif(not mistral_ai_setup, reason="Mistral AI Environment Variables not set"), + id="mistral_ai_tool_call_flow", + ), + pytest.param( + "anthropic", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ), + "max_tokens": 256, + }, + [ + [ + ChatMessageContent( + role=AuthorRole.SYSTEM, + items=[TextContent(text="You're very bad at math. 
Don't attempt to do it yourself.")], + ), + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="What is 345 + 3?")]), + ] + ], + {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not anthropic_setup, reason="Anthropic Environment Variables not set"), + id="anthropic_tool_call_auto", + ), + pytest.param( + "anthropic", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=False, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.SYSTEM, + items=[TextContent(text="You're very bad at math. Don't attempt to do it yourself.")], + ), + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="What is 345 + 3?")]), + ] + ], + {"test_type": FunctionChoiceTestTypes.NON_AUTO}, + marks=pytest.mark.skipif(not anthropic_setup, reason="Anthropic Environment Variables not set"), + id="anthropic_tool_call_non_auto", + ), + pytest.param( + "anthropic", + {}, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="What was our 2024 revenue?")], + ), + ChatMessageContent( + role=AuthorRole.ASSISTANT, + items=[ + FunctionCallContent( + id="123456789", name="finance-search", arguments='{"company": "contoso", "year": 2024}' + ) + ], + ), + ChatMessageContent( + role=AuthorRole.TOOL, + items=[FunctionResultContent(id="123456789", name="finance-search", result="1.2B")], + ), + ], + ], + {"test_type": FunctionChoiceTestTypes.FLOW}, + marks=pytest.mark.skipif(not anthropic_setup, reason="Anthropic Environment Variables not set"), + id="anthropic_tool_call_flow", + ), pytest.param( "google_ai", { @@ -279,6 +479,7 @@ class FunctionChoiceTestTypes(str, Enum): ] ], {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not google_ai_setup, reason="Google AI Environment Variables not set"), id="google_ai_tool_call_auto", ), pytest.param( @@ -298,6 +499,7 @@ class FunctionChoiceTestTypes(str, Enum): ] ], {"test_type": 
FunctionChoiceTestTypes.NON_AUTO}, + marks=pytest.mark.skipif(not google_ai_setup, reason="Google AI Environment Variables not set"), id="google_ai_tool_call_non_auto", ), pytest.param( @@ -327,6 +529,25 @@ class FunctionChoiceTestTypes(str, Enum): marks=pytest.mark.skip(reason="Skipping due to 429s from Google AI."), id="google_ai_tool_call_flow", ), + pytest.param( + "google_ai", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="Find the person whose id is 9b3f6e40.")], + ), + ] + ], + {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not google_ai_setup, reason="Google AI Environment Variables not set"), + id="google_ai_tool_call_auto_complex_return_type", + ), pytest.param( "vertex_ai", { @@ -345,6 +566,7 @@ class FunctionChoiceTestTypes(str, Enum): ] ], {"test_type": FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), id="vertex_ai_tool_call_auto", ), pytest.param( @@ -364,6 +586,7 @@ class FunctionChoiceTestTypes(str, Enum): ] ], {"test_type": FunctionChoiceTestTypes.NON_AUTO}, + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), id="vertex_ai_tool_call_non_auto", ), pytest.param( @@ -390,8 +613,28 @@ class FunctionChoiceTestTypes(str, Enum): ], ], {"test_type": FunctionChoiceTestTypes.FLOW}, + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), id="vertex_ai_tool_call_flow", ), + pytest.param( + "vertex_ai", + { + "function_choice_behavior": FunctionChoiceBehavior.Auto( + auto_invoke=True, filters={"excluded_plugins": ["task_plugin"]} + ) + }, + [ + [ + ChatMessageContent( + role=AuthorRole.USER, + items=[TextContent(text="Find the person whose id is 9b3f6e40.")], + ), + ] + ], + {"test_type": 
FunctionChoiceTestTypes.AUTO}, + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), + id="vertex_ai_tool_call_auto_complex_return_type", + ), ], ) @@ -543,12 +786,9 @@ async def _test_helper( retries=5, ) - if test_type != FunctionChoiceTestTypes.AUTO or stream: - # Need to add the last response (the response from the model after it sees the tool call result) - # to the chat history. - # When not streaming: responses from within the auto invoke loop will be added to the history. - # When streaming, responses will not add the message to the history if the response doesn't - # contain a FunctionCallContent - history.add_message(cmc) + # We need to add the latest message to the history because the connector is + # not responsible for updating the history, unless it is related to auto function + # calling, when the history is updated after the function calls are invoked. + history.add_message(cmc) self.evaluate(history, inputs=inputs, test_type=test_type) diff --git a/python/tests/integration/completions/test_chat_completion_with_image_input_text_output.py b/python/tests/integration/completions/test_chat_completion_with_image_input_text_output.py index 3ca6c41e6e36..b70b7585c81d 100644 --- a/python/tests/integration/completions/test_chat_completion_with_image_input_text_output.py +++ b/python/tests/integration/completions/test_chat_completion_with_image_input_text_output.py @@ -12,7 +12,11 @@ from semantic_kernel.contents import ChatHistory, ChatMessageContent, TextContent from semantic_kernel.contents.image_content import ImageContent from semantic_kernel.contents.utils.author_role import AuthorRole -from tests.integration.completions.chat_completion_test_base import ChatCompletionTestBase +from tests.integration.completions.chat_completion_test_base import ( + ChatCompletionTestBase, + google_ai_setup, + vertex_ai_setup, +) from tests.integration.completions.completion_test_base import ServiceType from 
tests.integration.completions.test_utils import retry @@ -156,6 +160,7 @@ ), ], {}, + marks=pytest.mark.skipif(not google_ai_setup, reason="Google AI Environment Variables not set"), id="google_ai_image_input_file", ), pytest.param( @@ -177,6 +182,7 @@ ), ], {}, + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), id="vertex_ai_image_input_file", ), ], diff --git a/python/tests/integration/completions/test_chat_completions.py b/python/tests/integration/completions/test_chat_completions.py index 417a35858548..505f7100a2a9 100644 --- a/python/tests/integration/completions/test_chat_completions.py +++ b/python/tests/integration/completions/test_chat_completions.py @@ -1,6 +1,5 @@ # Copyright (c) Microsoft. All rights reserved. -import os import sys from functools import partial from typing import Any @@ -12,7 +11,14 @@ from semantic_kernel.contents import ChatMessageContent, TextContent from semantic_kernel.contents.chat_history import ChatHistory from semantic_kernel.contents.utils.author_role import AuthorRole -from tests.integration.completions.chat_completion_test_base import ChatCompletionTestBase +from semantic_kernel.kernel_pydantic import KernelBaseModel +from tests.integration.completions.chat_completion_test_base import ( + ChatCompletionTestBase, + anthropic_setup, + mistral_ai_setup, + ollama_setup, + vertex_ai_setup, +) from tests.integration.completions.completion_test_base import ServiceType from tests.integration.completions.test_utils import retry @@ -21,26 +27,15 @@ else: from typing_extensions import override # pragma: no cover -mistral_ai_setup: bool = False -try: - if os.environ["MISTRALAI_API_KEY"] and os.environ["MISTRALAI_CHAT_MODEL_ID"]: - mistral_ai_setup = True -except KeyError: - mistral_ai_setup = False -ollama_setup: bool = False -try: - if os.environ["OLLAMA_MODEL"]: - ollama_setup = True -except KeyError: - ollama_setup = False +class Step(KernelBaseModel): + explanation: str + output: str + 
-anthropic_setup: bool = False -try: - if os.environ["ANTHROPIC_API_KEY"] and os.environ["ANTHROPIC_CHAT_MODEL_ID"]: - anthropic_setup = True -except KeyError: - anthropic_setup = False +class Reasoning(KernelBaseModel): + steps: list[Step] + final_answer: str pytestmark = pytest.mark.parametrize( @@ -56,6 +51,16 @@ {}, id="openai_text_input", ), + pytest.param( + "openai", + {"response_format": Reasoning}, + [ + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="Hello")]), + ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="How are you today?")]), + ], + {}, + id="openai_json_schema_response_format", + ), pytest.param( "azure", {}, @@ -108,7 +113,7 @@ marks=pytest.mark.skipif(not ollama_setup, reason="Need local Ollama setup"), id="ollama_text_input", ), - pytest.param( + pytest.param( "anthropic", {}, [ @@ -138,6 +143,7 @@ ChatMessageContent(role=AuthorRole.USER, items=[TextContent(text="How are you today?")]), ], ["Hello", "well"], + marks=pytest.mark.skipif(not vertex_ai_setup, reason="Vertex AI Environment Variables not set"), id="vertex_ai_text_input", ), ], diff --git a/python/tests/integration/completions/test_text_completion.py b/python/tests/integration/completions/test_text_completion.py index 56b11ddc3fad..ae835fd6ac16 100644 --- a/python/tests/integration/completions/test_text_completion.py +++ b/python/tests/integration/completions/test_text_completion.py @@ -131,13 +131,13 @@ def services(self) -> dict[str, tuple[ServiceType, type[PromptExecutionSettings] azure_openai_settings = AzureOpenAISettings.create() endpoint = azure_openai_settings.endpoint deployment_name = azure_openai_settings.text_deployment_name - api_key = azure_openai_settings.api_key.get_secret_value() + ad_token = azure_openai_settings.get_azure_openai_auth_token() api_version = azure_openai_settings.api_version azure_custom_client = AzureTextCompletion( async_client=AsyncAzureOpenAI( azure_endpoint=endpoint, azure_deployment=deployment_name, - 
api_key=api_key, + azure_ad_token=ad_token, api_version=api_version, default_headers={"Test-User-X-ID": "test"}, ), diff --git a/python/tests/integration/connectors/memory/test_postgres.py b/python/tests/integration/connectors/memory/test_postgres.py index fa57e253b5a4..ddab10fd3b51 100644 --- a/python/tests/integration/connectors/memory/test_postgres.py +++ b/python/tests/integration/connectors/memory/test_postgres.py @@ -1,214 +1,220 @@ # Copyright (c) Microsoft. All rights reserved. -import time +import uuid +from collections.abc import AsyncGenerator +from contextlib import asynccontextmanager +from typing import Annotated, Any +import pandas as pd import pytest -from psycopg_pool import PoolTimeout -from pydantic import ValidationError +import pytest_asyncio +from pydantic import BaseModel -from semantic_kernel.connectors.memory.postgres import PostgresMemoryStore +from semantic_kernel.connectors.memory.postgres import PostgresStore from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings -from semantic_kernel.exceptions import ServiceResourceNotFoundError +from semantic_kernel.data.const import DistanceFunction, IndexKind +from semantic_kernel.data.vector_store_model_decorator import vectorstoremodel +from semantic_kernel.data.vector_store_model_definition import VectorStoreRecordDefinition +from semantic_kernel.data.vector_store_record_fields import ( + VectorStoreRecordDataField, + VectorStoreRecordKeyField, + VectorStoreRecordVectorField, +) try: import psycopg # noqa: F401 - - psycopg_installed = True -except ImportError: - psycopg_installed = False - -pytestmark = pytest.mark.skipif(not psycopg_installed, reason="psycopg is not installed") - -try: import psycopg_pool # noqa: F401 psycopg_pool_installed = True except ImportError: psycopg_pool_installed = False -pytestmark = pytest.mark.skipif(not psycopg_pool_installed, reason="psycopg_pool is not installed") - - -# Needed because the test service may not support a high 
volume of requests -@pytest.fixture(scope="module") -def wait_between_tests(): - time.sleep(0.5) - return 0 - - -@pytest.fixture(scope="session") -def connection_string(): +pg_settings: PostgresSettings = PostgresSettings.create() +connection_params_present = any(pg_settings.get_connection_args().values()) + +pytestmark = pytest.mark.skipif( + not (psycopg_pool_installed or connection_params_present), + reason="psycopg_pool is not installed" if not psycopg_pool_installed else "No connection parameters provided", +) + + +@vectorstoremodel +class SimpleDataModel(BaseModel): + id: Annotated[int, VectorStoreRecordKeyField()] + embedding: Annotated[ + list[float], + VectorStoreRecordVectorField( + index_kind=IndexKind.HNSW, + dimensions=3, + distance_function=DistanceFunction.COSINE, + ), + ] + data: Annotated[ + dict[str, Any], + VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), + ] + + +def DataModelPandas(record) -> tuple: + definition = VectorStoreRecordDefinition( + fields={ + "embedding": VectorStoreRecordVectorField( + name="embedding", + index_kind="hnsw", + dimensions=3, + distance_function="cosine", + property_type="float", + ), + "id": VectorStoreRecordKeyField(name="id", property_type="int"), + "data": VectorStoreRecordDataField( + name="data", has_embedding=True, embedding_property_name="embedding", property_type="dict" + ), + }, + container_mode=True, + to_dict=lambda x: x.to_dict(orient="records"), + from_dict=lambda x, **_: pd.DataFrame(x), + ) + df = pd.DataFrame([record]) + return definition, df + + +@pytest_asyncio.fixture(scope="session") +async def vector_store() -> AsyncGenerator[PostgresStore, None]: + async with await pg_settings.create_connection_pool() as pool: + yield PostgresStore(connection_pool=pool) + + +@asynccontextmanager +async def create_simple_collection(vector_store: PostgresStore): + """Returns a collection with a unique name that is deleted after the context. 
+ + This can be moved to use a fixture with scope=function and loop_scope=session + after upgrade to pytest-asyncio 0.24. With the current version, the fixture + would both cache and use the event loop of the declared scope. + """ + suffix = str(uuid.uuid4()).replace("-", "")[:8] + collection_id = f"test_collection_{suffix}" + collection = vector_store.get_collection(collection_id, SimpleDataModel) + await collection.create_collection() try: - postgres_settings = PostgresSettings.create() - return postgres_settings.connection_string.get_secret_value() - except ValidationError: - pytest.skip("Postgres Connection string not found in env vars.") - - -def test_constructor(connection_string): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - assert memory._connection_pool is not None + yield collection + finally: + await collection.delete_collection() -@pytest.mark.asyncio -async def test_create_and_does_collection_exist(connection_string): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - result = await memory.does_collection_exist("test_collection") - assert result is not None - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - +def test_create_store(vector_store): + assert vector_store.connection_pool is not None -@pytest.mark.asyncio -async def test_get_collections(connection_string): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - result = await memory.get_collections() - assert "test_collection" in result - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") +@pytest.mark.asyncio(scope="session") +async def test_create_does_collection_exist_and_delete(vector_store: PostgresStore): + suffix = str(uuid.uuid4()).replace("-", "")[:8] + collection = vector_store.get_collection(f"test_collection_{suffix}", SimpleDataModel) -@pytest.mark.asyncio -async def 
test_delete_collection(connection_string): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") + does_exist_1 = await collection.does_collection_exist() + assert does_exist_1 is False - result = await memory.get_collections() - assert "test_collection" in result + await collection.create_collection() + does_exist_2 = await collection.does_collection_exist() + assert does_exist_2 is True - await memory.delete_collection("test_collection") - result = await memory.get_collections() - assert "test_collection" not in result - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") + await collection.delete_collection() + does_exist_3 = await collection.does_collection_exist() + assert does_exist_3 is False -@pytest.mark.asyncio -async def test_does_collection_exist(connection_string): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - result = await memory.does_collection_exist("test_collection") - assert result is True - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") +@pytest.mark.asyncio(scope="session") +async def test_list_collection_names(vector_store): + async with create_simple_collection(vector_store) as simple_collection: + simple_collection_id = simple_collection.collection_name + result = await vector_store.list_collection_names() + assert simple_collection_id in result -@pytest.mark.asyncio -async def test_upsert_and_get(connection_string, memory_record1): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - await memory.upsert("test_collection", memory_record1) - result = await memory.get("test_collection", memory_record1._id, with_embedding=True) - assert result is not None - assert result._id == memory_record1._id - assert result._text == memory_record1._text - assert result._timestamp == 
memory_record1._timestamp - for i in range(len(result._embedding)): - assert result._embedding[i] == memory_record1._embedding[i] - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - - -@pytest.mark.asyncio -async def test_upsert_batch_and_get_batch(connection_string, memory_record1, memory_record2): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - - results = await memory.get_batch( - "test_collection", - [memory_record1._id, memory_record2._id], - with_embeddings=True, - ) - assert len(results) == 2 - assert results[0]._id in [memory_record1._id, memory_record2._id] - assert results[1]._id in [memory_record1._id, memory_record2._id] - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - - -@pytest.mark.asyncio -async def test_remove(connection_string, memory_record1): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - await memory.upsert("test_collection", memory_record1) +@pytest.mark.asyncio(scope="session") +async def test_upsert_get_and_delete(vector_store: PostgresStore): + record = SimpleDataModel(id=1, embedding=[1.1, 2.2, 3.3], data={"key": "value"}) + async with create_simple_collection(vector_store) as simple_collection: + result_before_upsert = await simple_collection.get(1) + assert result_before_upsert is None - result = await memory.get("test_collection", memory_record1._id, with_embedding=True) + await simple_collection.upsert(record) + result = await simple_collection.get(1) assert result is not None + assert result.id == record.id + assert result.embedding == record.embedding + assert result.data == record.data + + # Check that the table has an index + connection_pool = simple_collection.connection_pool + async with connection_pool.connection() as conn, conn.cursor() 
as cur: + await cur.execute( + "SELECT indexname FROM pg_indexes WHERE tablename = %s", (simple_collection.collection_name,) + ) + rows = await cur.fetchall() + index_names = [index[0] for index in rows] + assert any("embedding_idx" in index_name for index_name in index_names) + + await simple_collection.delete(1) + result_after_delete = await simple_collection.get(1) + assert result_after_delete is None + + +@pytest.mark.asyncio(scope="session") +async def test_upsert_get_and_delete_pandas(vector_store): + record = SimpleDataModel(id=1, embedding=[1.1, 2.2, 3.3], data={"key": "value"}) + definition, df = DataModelPandas(record.model_dump()) + + suffix = str(uuid.uuid4()).replace("-", "")[:8] + collection = vector_store.get_collection( + f"test_collection_{suffix}", data_model_type=pd.DataFrame, data_model_definition=definition + ) + await collection.create_collection() - await memory.remove("test_collection", memory_record1._id) - with pytest.raises(ServiceResourceNotFoundError): - await memory.get("test_collection", memory_record1._id, with_embedding=True) - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - - -@pytest.mark.asyncio -async def test_remove_batch(connection_string, memory_record1, memory_record2): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) try: - await memory.create_collection("test_collection") - await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - await memory.remove_batch("test_collection", [memory_record1._id, memory_record2._id]) - with pytest.raises(ServiceResourceNotFoundError): - _ = await memory.get("test_collection", memory_record1._id, with_embedding=True) - - with pytest.raises(ServiceResourceNotFoundError): - _ = await memory.get("test_collection", memory_record2._id, with_embedding=True) - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - + result_before_upsert = await collection.get(1) + assert result_before_upsert is None 
-@pytest.mark.asyncio -async def test_get_nearest_match(connection_string, memory_record1, memory_record2): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) - test_embedding = memory_record1.embedding.copy() - test_embedding[0] = test_embedding[0] + 0.01 - - result = await memory.get_nearest_match( - "test_collection", test_embedding, min_relevance_score=0.0, with_embedding=True - ) + await collection.upsert(df) + result: pd.DataFrame = await collection.get(1) + assert result is not None + row = result.iloc[0] + assert row.id == record.id + assert row.embedding == record.embedding + assert row.data == record.data + + await collection.delete(1) + result_after_delete = await collection.get(1) + assert result_after_delete is None + finally: + await collection.delete_collection() + + +@pytest.mark.asyncio(scope="session") +async def test_upsert_get_and_delete_batch(vector_store: PostgresStore): + async with create_simple_collection(vector_store) as simple_collection: + record1 = SimpleDataModel(id=1, embedding=[1.1, 2.2, 3.3], data={"key": "value"}) + record2 = SimpleDataModel(id=2, embedding=[4.4, 5.5, 6.6], data={"key": "value"}) + + result_before_upsert = await simple_collection.get_batch([1, 2]) + assert result_before_upsert is None + + await simple_collection.upsert_batch([record1, record2]) + # Test get_batch for the two existing keys and one non-existing key; + # this should return only the two existing records. 
+ result = await simple_collection.get_batch([1, 2, 3]) assert result is not None - assert result[0]._id == memory_record1._id - assert result[0]._text == memory_record1._text - assert result[0]._timestamp == memory_record1._timestamp - for i in range(len(result[0]._embedding)): - assert result[0]._embedding[i] == memory_record1._embedding[i] - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") - - -@pytest.mark.asyncio -async def test_get_nearest_matches(connection_string, memory_record1, memory_record2, memory_record3): - memory = PostgresMemoryStore(connection_string, 2, 1, 5) - try: - await memory.create_collection("test_collection") - await memory.upsert_batch("test_collection", [memory_record1, memory_record2, memory_record3]) - test_embedding = memory_record2.embedding - test_embedding[0] = test_embedding[0] + 0.025 - - result = await memory.get_nearest_matches( - "test_collection", - test_embedding, - limit=2, - min_relevance_score=0.0, - with_embeddings=True, - ) assert len(result) == 2 - assert result[0][0]._id in [memory_record3._id, memory_record2._id] - assert result[1][0]._id in [memory_record3._id, memory_record2._id] - except PoolTimeout: - pytest.skip("PoolTimeout exception raised, skipping test.") + assert result[0] is not None + assert result[0].id == record1.id + assert result[0].embedding == record1.embedding + assert result[0].data == record1.data + assert result[1] is not None + assert result[1].id == record2.id + assert result[1].embedding == record2.embedding + assert result[1].data == record2.data + + await simple_collection.delete_batch([1, 2]) + result_after_delete = await simple_collection.get_batch([1, 2]) + assert result_after_delete is None diff --git a/python/tests/integration/connectors/memory/test_postgres_memory_store.py b/python/tests/integration/connectors/memory/test_postgres_memory_store.py new file mode 100644 index 000000000000..b73632d4c295 --- /dev/null +++ 
b/python/tests/integration/connectors/memory/test_postgres_memory_store.py @@ -0,0 +1,210 @@ +# Copyright (c) Microsoft. All rights reserved. + +import time + +import pytest +from psycopg_pool import PoolTimeout +from pydantic import ValidationError + +from semantic_kernel.connectors.memory.postgres import PostgresMemoryStore +from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings +from semantic_kernel.exceptions import ServiceResourceNotFoundError + +try: + import psycopg # noqa: F401 + + psycopg_installed = True +except ImportError: + psycopg_installed = False + +pytestmark = pytest.mark.skipif(not psycopg_installed, reason="psycopg is not installed") + +try: + import psycopg_pool # noqa: F401 + + psycopg_pool_installed = True +except ImportError: + psycopg_pool_installed = False + +pytestmark = pytest.mark.skipif(not psycopg_pool_installed, reason="psycopg_pool is not installed") + + +# Needed because the test service may not support a high volume of requests +@pytest.fixture(scope="module") +def wait_between_tests(): + time.sleep(0.5) + return 0 + + +@pytest.fixture(scope="session") +def connection_string(): + try: + postgres_settings = PostgresSettings.create() + return postgres_settings.connection_string.get_secret_value() + except ValidationError: + pytest.skip("Postgres Connection string not found in env vars.") + + +def test_constructor(connection_string): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + assert memory._connection_pool is not None + + +@pytest.mark.asyncio +async def test_create_and_does_collection_exist(connection_string): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") + assert result is not None + + +@pytest.mark.asyncio +async def test_get_collections(connection_string): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await 
memory.create_collection("test_collection") + result = await memory.get_collections() + assert "test_collection" in result + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_delete_collection(connection_string): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + + result = await memory.get_collections() + assert "test_collection" in result + + await memory.delete_collection("test_collection") + result = await memory.get_collections() + assert "test_collection" not in result + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_does_collection_exist(connection_string): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + result = await memory.does_collection_exist("test_collection") + assert result is True + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_upsert_and_get(connection_string, memory_record1): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + await memory.upsert("test_collection", memory_record1) + result = await memory.get("test_collection", memory_record1._id, with_embedding=True) + assert result is not None + assert result._id == memory_record1._id + assert result._text == memory_record1._text + assert result._timestamp == memory_record1._timestamp + for i in range(len(result._embedding)): + assert result._embedding[i] == memory_record1._embedding[i] + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_upsert_batch_and_get_batch(connection_string, memory_record1, memory_record2): + with PostgresMemoryStore(connection_string, 2, 1, 5) as 
memory: + try: + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + + results = await memory.get_batch( + "test_collection", + [memory_record1._id, memory_record2._id], + with_embeddings=True, + ) + assert len(results) == 2 + assert results[0]._id in [memory_record1._id, memory_record2._id] + assert results[1]._id in [memory_record1._id, memory_record2._id] + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_remove(connection_string, memory_record1): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + await memory.upsert("test_collection", memory_record1) + + result = await memory.get("test_collection", memory_record1._id, with_embedding=True) + assert result is not None + + await memory.remove("test_collection", memory_record1._id) + with pytest.raises(ServiceResourceNotFoundError): + await memory.get("test_collection", memory_record1._id, with_embedding=True) + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_remove_batch(connection_string, memory_record1, memory_record2): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + await memory.remove_batch("test_collection", [memory_record1._id, memory_record2._id]) + with pytest.raises(ServiceResourceNotFoundError): + _ = await memory.get("test_collection", memory_record1._id, with_embedding=True) + + with pytest.raises(ServiceResourceNotFoundError): + _ = await memory.get("test_collection", memory_record2._id, with_embedding=True) + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def 
test_get_nearest_match(connection_string, memory_record1, memory_record2): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2]) + test_embedding = memory_record1.embedding.copy() + test_embedding[0] = test_embedding[0] + 0.01 + + result = await memory.get_nearest_match( + "test_collection", test_embedding, min_relevance_score=0.0, with_embedding=True + ) + assert result is not None + assert result[0]._id == memory_record1._id + assert result[0]._text == memory_record1._text + assert result[0]._timestamp == memory_record1._timestamp + for i in range(len(result[0]._embedding)): + assert result[0]._embedding[i] == memory_record1._embedding[i] + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") + + +@pytest.mark.asyncio +async def test_get_nearest_matches(connection_string, memory_record1, memory_record2, memory_record3): + with PostgresMemoryStore(connection_string, 2, 1, 5) as memory: + try: + await memory.create_collection("test_collection") + await memory.upsert_batch("test_collection", [memory_record1, memory_record2, memory_record3]) + test_embedding = memory_record2.embedding + test_embedding[0] = test_embedding[0] + 0.025 + + result = await memory.get_nearest_matches( + "test_collection", + test_embedding, + limit=2, + min_relevance_score=0.0, + with_embeddings=True, + ) + assert len(result) == 2 + assert result[0][0]._id in [memory_record3._id, memory_record2._id] + assert result[1][0]._id in [memory_record3._id, memory_record2._id] + except PoolTimeout: + pytest.skip("PoolTimeout exception raised, skipping test.") diff --git a/python/tests/integration/cross_language/test_cross_language.py b/python/tests/integration/cross_language/test_cross_language.py index 15f8374255ef..456874a95a76 100644 --- a/python/tests/integration/cross_language/test_cross_language.py +++ 
b/python/tests/integration/cross_language/test_cross_language.py @@ -222,6 +222,7 @@ async def test_prompt_with_chat_roles(is_inline, is_streaming, template_format, if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object @@ -281,6 +282,7 @@ async def test_prompt_with_complex_objects(is_inline, is_streaming, template_for if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object @@ -351,6 +353,7 @@ async def test_prompt_with_helper_functions(is_inline, is_streaming, template_fo if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object @@ -410,6 +413,7 @@ async def test_prompt_with_simple_variable(is_inline, is_streaming, template_for if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object @@ -468,6 +472,7 @@ async def test_simple_prompt(is_inline, is_streaming, template_format, prompt): if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object @@ -523,6 +528,7 @@ async def test_yaml_prompt(is_streaming, prompt_path, expected_result_path, kern if is_streaming: expected_object["stream"] = True + expected_object["stream_options"] = {"include_usage": True} assert obtained_object == expected_object diff --git a/python/tests/integration/embeddings/test_azure_ai_inference_embedding_service.py b/python/tests/integration/embeddings/test_azure_ai_inference_embedding_service.py index 763512e9e461..b6d16adbedac 100644 --- a/python/tests/integration/embeddings/test_azure_ai_inference_embedding_service.py +++ b/python/tests/integration/embeddings/test_azure_ai_inference_embedding_service.py @@ -3,11 +3,12 
@@ import pytest from azure.ai.inference.aio import EmbeddingsClient -from azure.core.credentials import AzureKeyCredential +from azure.identity import DefaultAzureCredential from semantic_kernel.connectors.ai.azure_ai_inference.services.azure_ai_inference_text_embedding import ( AzureAIInferenceTextEmbedding, ) +from semantic_kernel.connectors.ai.open_ai.const import DEFAULT_AZURE_API_VERSION from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.core_plugins.text_memory_plugin import TextMemoryPlugin from semantic_kernel.kernel import Kernel @@ -21,14 +22,14 @@ async def test_azure_ai_inference_embedding_service(kernel: Kernel): azure_openai_settings = AzureOpenAISettings.create() endpoint = azure_openai_settings.endpoint deployment_name = azure_openai_settings.embedding_deployment_name - api_key = azure_openai_settings.api_key.get_secret_value() embeddings_gen = AzureAIInferenceTextEmbedding( ai_model_id=deployment_name, client=EmbeddingsClient( endpoint=f'{str(endpoint).strip("/")}/openai/deployments/{deployment_name}', - credential=AzureKeyCredential(""), - headers={"api-key": api_key}, + credential=DefaultAzureCredential(), + credential_scopes=["https://cognitiveservices.azure.com/.default"], + api_version=DEFAULT_AZURE_API_VERSION, ), ) diff --git a/python/tests/integration/embeddings/test_azure_oai_embedding_service.py b/python/tests/integration/embeddings/test_azure_oai_embedding_service.py index 957fd455c363..18cdbb79e941 100644 --- a/python/tests/integration/embeddings/test_azure_oai_embedding_service.py +++ b/python/tests/integration/embeddings/test_azure_oai_embedding_service.py @@ -35,17 +35,16 @@ async def test_azure_text_embedding_service(kernel: Kernel): @pytest.mark.asyncio async def test_azure_text_embedding_service_with_provided_client(kernel: Kernel): - azure_openai_settings = AzureOpenAISettings.create() endpoint = azure_openai_settings.endpoint deployment_name = 
azure_openai_settings.embedding_deployment_name - api_key = azure_openai_settings.api_key.get_secret_value() + ad_token = azure_openai_settings.get_azure_openai_auth_token() api_version = azure_openai_settings.api_version client = AsyncAzureOpenAI( azure_endpoint=endpoint, azure_deployment=deployment_name, - api_key=api_key, + azure_ad_token=ad_token, api_version=api_version, default_headers={"Test-User-X-ID": "test"}, ) diff --git a/python/tests/unit/agents/test_agent_chat.py b/python/tests/unit/agents/test_agent_chat.py index 622c3654f853..5b50a999a202 100644 --- a/python/tests/unit/agents/test_agent_chat.py +++ b/python/tests/unit/agents/test_agent_chat.py @@ -132,6 +132,31 @@ async def mock_invoke(*args, **kwargs): await agent_chat.reset() +@pytest.mark.asyncio +async def test_invoke_streaming_agent(agent_chat, agent, chat_message): + mock_channel = mock.MagicMock(spec=AgentChannel) + + async def mock_invoke(*args, **kwargs): + yield chat_message + + mock_channel.invoke_stream.side_effect = mock_invoke + + with ( + patch( + "semantic_kernel.agents.group_chat.agent_chat.AgentChat._get_or_create_channel", return_value=mock_channel + ), + patch( + "semantic_kernel.agents.group_chat.broadcast_queue.BroadcastQueue.enqueue", + return_value=AsyncMock(), + ), + ): + async for _ in agent_chat.invoke_agent_stream(agent): + pass + + mock_channel.invoke_stream.assert_called_once_with(agent, []) + await agent_chat.reset() + + @pytest.mark.asyncio async def test_synchronize_channel_with_existing_channel(agent_chat): mock_channel = MagicMock(spec=AgentChannel) diff --git a/python/tests/unit/agents/test_agent_group_chat.py b/python/tests/unit/agents/test_agent_group_chat.py index e4f607098527..9aeb61d56d38 100644 --- a/python/tests/unit/agents/test_agent_group_chat.py +++ b/python/tests/unit/agents/test_agent_group_chat.py @@ -34,6 +34,9 @@ def selection_strategy(): return AsyncMock(spec=SelectionStrategy) +# region Non-Streaming + + def 
test_agent_group_chat_initialization(agents, termination_strategy, selection_strategy): group_chat = AgentGroupChat( agents=agents, termination_strategy=termination_strategy, selection_strategy=selection_strategy @@ -132,7 +135,7 @@ async def mock_invoke_gen(*args, **kwargs): @pytest.mark.asyncio async def test_invoke_with_complete_chat(agents, termination_strategy): termination_strategy.automatic_reset = False - group_chat = AgentGroupChat(termination_strategy=termination_strategy) + group_chat = AgentGroupChat(agents=agents, termination_strategy=termination_strategy) group_chat.is_complete = True with pytest.raises(AgentChatException, match="Chat is already complete"): @@ -140,6 +143,15 @@ async def test_invoke_with_complete_chat(agents, termination_strategy): pass +@pytest.mark.asyncio +async def test_invoke_agent_with_none_defined_errors(agents): + group_chat = AgentGroupChat() + + with pytest.raises(AgentChatException, match="No agents are available"): + async for _ in group_chat.invoke(): + pass + + @pytest.mark.asyncio async def test_invoke_selection_strategy_error(agents, selection_strategy): group_chat = AgentGroupChat(agents=agents, selection_strategy=selection_strategy) @@ -206,3 +218,139 @@ async def mock_invoke_agent(*args, **kwargs): iteration_count += 1 assert iteration_count == 2 + + +# endregion + +# region Streaming + + +@pytest.mark.asyncio +async def test_invoke_streaming_single_turn(agents, termination_strategy): + group_chat = AgentGroupChat(termination_strategy=termination_strategy) + + async def mock_invoke(agent, is_joining=True): + yield MagicMock(role=AuthorRole.ASSISTANT) + + with mock.patch.object(AgentGroupChat, "invoke_stream", side_effect=mock_invoke): + termination_strategy.should_terminate.return_value = False + + async for message in group_chat.invoke_stream_single_turn(agents[0]): + assert message.role == AuthorRole.ASSISTANT + + termination_strategy.should_terminate.assert_awaited_once() + + +@pytest.mark.asyncio +async def 
test_invoke_stream_with_agent_joining(agents, termination_strategy): + for agent in agents: + agent.name = f"Agent {agent.id}" + agent.id = f"agent-{agent.id}" + + group_chat = AgentGroupChat(termination_strategy=termination_strategy) + + with ( + mock.patch.object(AgentGroupChat, "add_agent", autospec=True) as mock_add_agent, + mock.patch.object(AgentChat, "invoke_agent_stream", autospec=True) as mock_invoke_agent, + ): + + async def mock_invoke_gen(*args, **kwargs): + yield MagicMock(role=AuthorRole.ASSISTANT) + + mock_invoke_agent.side_effect = mock_invoke_gen + + async for _ in group_chat.invoke_stream(agents[0], is_joining=True): + pass + + mock_add_agent.assert_called_once_with(group_chat, agents[0]) + + +@pytest.mark.asyncio +async def test_invoke_stream_with_complete_chat(agents, termination_strategy): + termination_strategy.automatic_reset = False + group_chat = AgentGroupChat(agents=agents, termination_strategy=termination_strategy) + group_chat.is_complete = True + + with pytest.raises(AgentChatException, match="Chat is already complete"): + async for _ in group_chat.invoke_stream(): + pass + + +@pytest.mark.asyncio +async def test_invoke_stream_selection_strategy_error(agents, selection_strategy): + group_chat = AgentGroupChat(agents=agents, selection_strategy=selection_strategy) + + selection_strategy.next.side_effect = Exception("Selection failed") + + with pytest.raises(AgentChatException, match="Failed to select agent"): + async for _ in group_chat.invoke_stream(): + pass + + +@pytest.mark.asyncio +async def test_invoke_stream_iterations(agents, termination_strategy, selection_strategy): + for agent in agents: + agent.name = f"Agent {agent.id}" + agent.id = f"agent-{agent.id}" + + termination_strategy.maximum_iterations = 2 + + group_chat = AgentGroupChat( + agents=agents, termination_strategy=termination_strategy, selection_strategy=selection_strategy + ) + + selection_strategy.next.side_effect = lambda agents, history: agents[0] + + async def 
mock_invoke_agent(*args, **kwargs): + yield MagicMock(role=AuthorRole.ASSISTANT) + + with mock.patch.object(AgentChat, "invoke_agent_stream", side_effect=mock_invoke_agent): + termination_strategy.should_terminate.return_value = False + + iteration_count = 0 + async for _ in group_chat.invoke_stream(): + iteration_count += 1 + + assert iteration_count == 2 + + +@pytest.mark.asyncio +async def test_invoke_stream_is_complete_then_reset(agents, termination_strategy, selection_strategy): + for agent in agents: + agent.name = f"Agent {agent.id}" + agent.id = f"agent-{agent.id}" + + termination_strategy.maximum_iterations = 2 + termination_strategy.automatic_reset = True + + group_chat = AgentGroupChat( + agents=agents, termination_strategy=termination_strategy, selection_strategy=selection_strategy + ) + + group_chat.is_complete = True + + selection_strategy.next.side_effect = lambda agents, history: agents[0] + + async def mock_invoke_agent(*args, **kwargs): + yield MagicMock(role=AuthorRole.ASSISTANT) + + with mock.patch.object(AgentChat, "invoke_agent_stream", side_effect=mock_invoke_agent): + termination_strategy.should_terminate.return_value = False + + iteration_count = 0 + async for _ in group_chat.invoke_stream(): + iteration_count += 1 + + assert iteration_count == 2 + + +@pytest.mark.asyncio +async def test_invoke_streaming_agent_with_none_defined_errors(agents): + group_chat = AgentGroupChat() + + with pytest.raises(AgentChatException, match="No agents are available"): + async for _ in group_chat.invoke_stream(): + pass + + +# endregion diff --git a/python/tests/unit/agents/test_azure_assistant_agent.py b/python/tests/unit/agents/test_azure_assistant_agent.py index 4dc6cf8090c3..2d6e1b50e698 100644 --- a/python/tests/unit/agents/test_azure_assistant_agent.py +++ b/python/tests/unit/agents/test_azure_assistant_agent.py @@ -239,7 +239,7 @@ async def test_retrieve_agent(kernel, azure_openai_unit_test_env): mock_client_instance.beta = MagicMock() 
mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock()) + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) OpenAIAssistantBase._create_open_ai_assistant_definition = MagicMock( return_value={ diff --git a/python/tests/unit/agents/test_chat_history_channel.py b/python/tests/unit/agents/test_chat_history_channel.py index 4d0bb84997b1..ea1b3afbab34 100644 --- a/python/tests/unit/agents/test_chat_history_channel.py +++ b/python/tests/unit/agents/test_chat_history_channel.py @@ -19,6 +19,10 @@ async def invoke(self, history: list[ChatMessageContent]) -> AsyncIterable[ChatM for message in history: yield ChatMessageContent(role=AuthorRole.SYSTEM, content=f"Processed: {message.content}") + async def invoke_stream(self, history: list[ChatMessageContent]) -> AsyncIterable[ChatMessageContent]: + for message in history: + yield ChatMessageContent(role=AuthorRole.SYSTEM, content=f"Processed: {message.content}") + class MockNonChatHistoryHandler: """Mock agent to test incorrect instance handling.""" @@ -62,6 +66,32 @@ async def mock_invoke(history: list[ChatMessageContent]): assert "Processed: Initial message" in received_messages[0].content +@pytest.mark.asyncio +async def test_invoke_stream(): + channel = ChatHistoryChannel() + agent = AsyncMock(spec=MockChatHistoryHandler) + + async def mock_invoke(history: list[ChatMessageContent]): + for message in history: + msg = ChatMessageContent(role=AuthorRole.SYSTEM, content=f"Processed: {message.content}") + yield msg + channel.add_message(msg) + + agent.invoke_stream.return_value = AsyncIterableMock( + lambda: mock_invoke([ChatMessageContent(role=AuthorRole.USER, content="Initial message")]) + ) + + initial_message = ChatMessageContent(role=AuthorRole.USER, content="Initial message") + channel.messages.append(initial_message) + + received_messages = [] + async for message in channel.invoke_stream(agent, 
received_messages): + assert message is not None + + assert len(received_messages) == 1 + assert "Processed: Initial message" in received_messages[0].content + + @pytest.mark.asyncio async def test_invoke_leftover_in_queue(): channel = ChatHistoryChannel() @@ -110,6 +140,16 @@ async def test_invoke_incorrect_instance_throws(): pass +@pytest.mark.asyncio +async def test_invoke_stream_incorrect_instance_throws(): + channel = ChatHistoryChannel() + agent = MockNonChatHistoryHandler() + + with pytest.raises(ServiceInvalidTypeError): + async for _ in channel.invoke_stream(agent, []): + pass + + @pytest.mark.asyncio async def test_receive(): channel = ChatHistoryChannel() diff --git a/python/tests/unit/agents/test_open_ai_assistant_agent.py b/python/tests/unit/agents/test_open_ai_assistant_agent.py index 18096b90ab15..9de30f4ba96c 100644 --- a/python/tests/unit/agents/test_open_ai_assistant_agent.py +++ b/python/tests/unit/agents/test_open_ai_assistant_agent.py @@ -19,7 +19,7 @@ from semantic_kernel.kernel import Kernel -@pytest.fixture +@pytest.fixture(scope="function") def openai_assistant_agent(kernel: Kernel, openai_unit_test_env): return OpenAIAssistantAgent( kernel=kernel, @@ -35,7 +35,7 @@ def openai_assistant_agent(kernel: Kernel, openai_unit_test_env): ) -@pytest.fixture +@pytest.fixture(scope="function") def mock_assistant(): return Assistant( created_at=123456789, @@ -64,7 +64,7 @@ def mock_assistant(): ) -@pytest.fixture +@pytest.fixture(scope="function") def mock_assistant_json(): return Assistant( created_at=123456789, @@ -247,18 +247,44 @@ async def test_create_agent_second_way(kernel: Kernel, mock_assistant, openai_un @pytest.mark.asyncio -async def test_list_definitions(kernel: Kernel, mock_assistant, openai_unit_test_env): +async def test_list_definitions(kernel: Kernel, openai_unit_test_env): agent = OpenAIAssistantAgent( kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" ) + assistant = 
Assistant( + id="test_id", + created_at=123456789, + description="test_description", + instructions="test_instructions", + metadata={ + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + model="test_model", + name="test_name", + object="assistant", + temperature=0.7, + tool_resources=ToolResources( + code_interpreter=ToolResourcesCodeInterpreter(code_interpreter_file_ids=["file1", "file2"]), + file_search=ToolResourcesFileSearch(vector_store_ids=["vector_store1"]), + ), + top_p=0.9, + response_format={"type": "json_object"}, + tools=[{"type": "code_interpreter"}, {"type": "file_search"}], + ) + with patch.object( OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) ) as mock_create_client: mock_client_instance = mock_create_client.return_value mock_client_instance.beta = MagicMock() mock_client_instance.beta.assistants = MagicMock() - mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[mock_assistant])) + mock_client_instance.beta.assistants.list = AsyncMock(return_value=MagicMock(data=[assistant])) agent.client = mock_client_instance @@ -297,21 +323,53 @@ async def test_list_definitions(kernel: Kernel, mock_assistant, openai_unit_test } +@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) +@pytest.mark.asyncio +async def test_retrieve_agent_missing_chat_model_id_throws(kernel, openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): + _ = await OpenAIAssistantAgent.retrieve( + id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" + ) + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) @pytest.mark.asyncio -async def test_retrieve_agent(kernel, openai_unit_test_env): - with patch.object( - OpenAIAssistantAgent, "_create_client", 
return_value=MagicMock(spec=AsyncOpenAI) - ) as mock_create_client: - mock_client_instance = mock_create_client.return_value - mock_client_instance.beta = MagicMock() - mock_client_instance.beta.assistants = MagicMock() +async def test_retrieve_agent_missing_api_key_throws(kernel, openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." + ): + _ = await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") - mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock()) - agent = OpenAIAssistantAgent( - kernel=kernel, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" - ) - OpenAIAssistantBase._create_open_ai_assistant_definition = MagicMock( +def test_open_ai_settings_create_throws(openai_unit_test_env): + with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: + mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") + + with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): + OpenAIAssistantAgent( + service_id="test", api_key="test_api_key", org_id="test_org_id", ai_model_id="test_model_id" + ) + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) +def test_azure_openai_agent_create_missing_chat_model_id_throws(openai_unit_test_env): + with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): + OpenAIAssistantAgent(service_id="test_service", env_file_path="test.env") + + +@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) +def test_azure_openai_agent_create_missing_api_key_throws(openai_unit_test_env): + with pytest.raises( + AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." 
+ ): + OpenAIAssistantAgent(env_file_path="test.env") + + +def test_create_open_ai_assistant_definition_with_json_metadata(mock_assistant_json, openai_unit_test_env): + with ( + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", return_value={ "ai_model_id": "test_model", "description": "test_description", @@ -337,10 +395,81 @@ async def test_retrieve_agent(kernel, openai_unit_test_env): "max_prompt_tokens": 50, "parallel_tool_calls_enabled": True, "truncation_message_count": 10, - } - ) + }, + ) as mock_create_def, + ): + assert mock_create_def.return_value == { + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + "temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } - retrieved_agent = await agent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) + +@pytest.mark.asyncio +async def test_retrieve_agent(kernel, openai_unit_test_env): + with ( + patch.object( + OpenAIAssistantAgent, "_create_client", return_value=MagicMock(spec=AsyncOpenAI) + ) as mock_create_client, + patch.object( + OpenAIAssistantBase, + "_create_open_ai_assistant_definition", + return_value={ + "ai_model_id": "test_model", + "description": "test_description", + "id": "test_id", + "instructions": "test_instructions", + "name": "test_name", + "enable_code_interpreter": True, + "enable_file_search": True, + "enable_json_response": True, + "code_interpreter_file_ids": ["file1", "file2"], + 
"temperature": 0.7, + "top_p": 0.9, + "vector_store_id": "vector_store1", + "metadata": { + "__run_options": { + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + } + }, + "max_completion_tokens": 100, + "max_prompt_tokens": 50, + "parallel_tool_calls_enabled": True, + "truncation_message_count": 10, + }, + ) as mock_create_def, + ): + mock_client_instance = mock_create_client.return_value + mock_client_instance.beta = MagicMock() + mock_client_instance.beta.assistants = MagicMock() + + mock_client_instance.beta.assistants.retrieve = AsyncMock(return_value=AsyncMock(spec=Assistant)) + + retrieved_agent = await OpenAIAssistantAgent.retrieve(id="test_id", api_key="test_api_key", kernel=kernel) assert retrieved_agent.model_dump( include={ "ai_model_id", @@ -388,116 +517,4 @@ async def test_retrieve_agent(kernel, openai_unit_test_env): "truncation_message_count": 10, } mock_client_instance.beta.assistants.retrieve.assert_called_once_with("test_id") - OpenAIAssistantBase._create_open_ai_assistant_definition.assert_called_once() - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) -@pytest.mark.asyncio -async def test_retrieve_agent_missing_chat_model_id_throws(kernel, openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): - _ = await OpenAIAssistantAgent.retrieve( - id="test_id", api_key="test_api_key", kernel=kernel, env_file_path="test.env" - ) - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) -@pytest.mark.asyncio -async def test_retrieve_agent_missing_api_key_throws(kernel, openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." 
- ): - _ = await OpenAIAssistantAgent.retrieve(id="test_id", kernel=kernel, env_file_path="test.env") - - -def test_open_ai_settings_create_throws(openai_unit_test_env): - with patch("semantic_kernel.connectors.ai.open_ai.settings.open_ai_settings.OpenAISettings.create") as mock_create: - mock_create.side_effect = ValidationError.from_exception_data("test", line_errors=[], input_type="python") - - with pytest.raises(AgentInitializationException, match="Failed to create OpenAI settings."): - OpenAIAssistantAgent( - service_id="test", api_key="test_api_key", org_id="test_org_id", ai_model_id="test_model_id" - ) - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_CHAT_MODEL_ID"]], indirect=True) -def test_azure_openai_agent_create_missing_chat_model_id_throws(openai_unit_test_env): - with pytest.raises(AgentInitializationException, match="The OpenAI chat model ID is required."): - OpenAIAssistantAgent(service_id="test_service", env_file_path="test.env") - - -@pytest.mark.parametrize("exclude_list", [["OPENAI_API_KEY"]], indirect=True) -def test_azure_openai_agent_create_missing_api_key_throws(openai_unit_test_env): - with pytest.raises( - AgentInitializationException, match="The OpenAI API key is required, if a client is not provided." 
- ): - OpenAIAssistantAgent(env_file_path="test.env") - - -def test_create_open_ai_assistant_definition(mock_assistant, openai_unit_test_env): - agent = OpenAIAssistantAgent( - kernel=None, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" - ) - - definition = agent._create_open_ai_assistant_definition(mock_assistant) - - assert definition == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - - -def test_create_open_ai_assistant_definition_with_json_metadata(mock_assistant_json, openai_unit_test_env): - agent = OpenAIAssistantAgent( - kernel=None, service_id="test_service", name="test_name", instructions="test_instructions", id="test_id" - ) - - definition = agent._create_open_ai_assistant_definition(mock_assistant_json) - - assert definition == { - "ai_model_id": "test_model", - "description": "test_description", - "id": "test_id", - "instructions": "test_instructions", - "name": "test_name", - "enable_code_interpreter": True, - "enable_file_search": True, - "enable_json_response": True, - "code_interpreter_file_ids": ["file1", "file2"], - "temperature": 0.7, - "top_p": 0.9, - "vector_store_id": "vector_store1", - "metadata": { - "__run_options": { - "max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } - }, - 
"max_completion_tokens": 100, - "max_prompt_tokens": 50, - "parallel_tool_calls_enabled": True, - "truncation_message_count": 10, - } + mock_create_def.assert_called_once() diff --git a/python/tests/unit/agents/test_open_ai_assistant_base.py b/python/tests/unit/agents/test_open_ai_assistant_base.py index 815ca2b2208e..6f0f5c0bbd60 100644 --- a/python/tests/unit/agents/test_open_ai_assistant_base.py +++ b/python/tests/unit/agents/test_open_ai_assistant_base.py @@ -6,20 +6,33 @@ import pytest from openai import AsyncAzureOpenAI, AsyncOpenAI +from openai.lib.streaming._assistants import AsyncAssistantEventHandler, AsyncAssistantStreamManager from openai.resources.beta.threads.runs.runs import Run from openai.types.beta.assistant import Assistant, ToolResources, ToolResourcesCodeInterpreter, ToolResourcesFileSearch +from openai.types.beta.assistant_stream_event import ( + MessageDeltaEvent, + ThreadMessageDelta, + ThreadRunFailed, + ThreadRunRequiresAction, + ThreadRunStepCompleted, +) from openai.types.beta.assistant_tool import CodeInterpreterTool, FileSearchTool +from openai.types.beta.function_tool import FunctionDefinition, FunctionTool +from openai.types.beta.threads import ImageFileDelta, ImageFileDeltaBlock, MessageDelta, TextDelta, TextDeltaBlock from openai.types.beta.threads.annotation import FileCitationAnnotation, FilePathAnnotation from openai.types.beta.threads.file_citation_annotation import FileCitation +from openai.types.beta.threads.file_citation_delta_annotation import FileCitationDeltaAnnotation from openai.types.beta.threads.file_path_annotation import FilePath from openai.types.beta.threads.image_file import ImageFile from openai.types.beta.threads.image_file_content_block import ImageFileContentBlock from openai.types.beta.threads.required_action_function_tool_call import Function from openai.types.beta.threads.required_action_function_tool_call import Function as RequiredActionFunction from openai.types.beta.threads.run import ( + LastError, 
RequiredAction, RequiredActionFunctionToolCall, RequiredActionSubmitToolOutputs, + TruncationStrategy, ) from openai.types.beta.threads.runs import RunStep from openai.types.beta.threads.runs.code_interpreter_tool_call import ( @@ -29,6 +42,7 @@ from openai.types.beta.threads.runs.function_tool_call import Function as RunsFunction from openai.types.beta.threads.runs.function_tool_call import FunctionToolCall from openai.types.beta.threads.runs.message_creation_step_details import MessageCreation, MessageCreationStepDetails +from openai.types.beta.threads.runs.run_step import Usage from openai.types.beta.threads.runs.tool_calls_step_details import ToolCallsStepDetails from openai.types.beta.threads.text import Text from openai.types.beta.threads.text_content_block import TextContentBlock @@ -140,13 +154,15 @@ class MockMessage: @pytest.fixture def mock_thread_messages(): class MockMessage: - def __init__(self, role, content, assistant_id=None): + def __init__(self, id, role, content, assistant_id=None): + self.id = id self.role = role self.content = content self.assistant_id = assistant_id return [ MockMessage( + id="test_message_id_1", role="user", content=[ TextContentBlock( @@ -174,6 +190,7 @@ def __init__(self, role, content, assistant_id=None): ], ), MockMessage( + id="test_message_id_2", role="assistant", content=[ ImageFileContentBlock(type="image_file", image_file=ImageFile(file_id="test_file_id", detail="auto")) @@ -284,6 +301,7 @@ def __init__(self): ] ), ) + self.last_error = None def update_status(self): self.poll_count += 1 @@ -354,6 +372,241 @@ def __init__(self): ) +class MockEvent: + def __init__(self, event, data): + self.event = event + self.data = data + + +class MockRunData: + def __init__(self, id, status): + self.id = id + self.status = status + # Add other attributes as needed + + +def create_thread_message_delta_mock(): + return ThreadMessageDelta( + data=MessageDeltaEvent( + id="mock_msg_id", + delta=MessageDelta( + content=[ + 
TextDeltaBlock( + index=0, + type="text", + text=TextDelta( + annotations=[ + FileCitationDeltaAnnotation( + index=0, + type="file_citation", + start_index=1, + end_index=3, + text="annotation", + ) + ], + value="Hello", + ), + ), + ImageFileDeltaBlock( + index=0, + type="image_file", + image_file=ImageFileDelta( + file_id="test_file_id", + detail="auto", + ), + ), + ], + role=None, + ), + object="thread.message.delta", + ), + event="thread.message.delta", + ) + + +def mock_thread_requires_action_run(): + return ThreadRunRequiresAction( + data=Run( + id="run_00OwjJnEg2SGJy8sky7ip35P", + assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", + cancelled_at=None, + completed_at=None, + created_at=1727798684, + expires_at=1727799284, + failed_at=None, + incomplete_details=None, + instructions="Answer questions about the menu.", + last_error=None, + max_completion_tokens=None, + max_prompt_tokens=None, + metadata={}, + model="gpt-4o-2024-08-06", + object="thread.run", + parallel_tool_calls=True, + required_action=RequiredAction( + submit_tool_outputs=RequiredActionSubmitToolOutputs( + tool_calls=[ + RequiredActionFunctionToolCall( + id="call_OTcZMjhm7WbhFnGkrmUjs68T", + function=Function(arguments="{}", name="menu-get_specials"), + type="function", + ) + ] + ), + type="submit_tool_outputs", + ), + response_format="auto", + started_at=1727798685, + status="requires_action", + thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", + tool_choice="auto", + tools=[ + FunctionTool( + function=FunctionDefinition( + name="menu-get_item_price", + description="Provides the price of the requested menu item.", + parameters={ + "type": "object", + "properties": { + "menu_item": {"type": "string", "description": "The name of the menu item."} + }, + "required": ["menu_item"], + }, + strict=False, + ), + type="function", + ), + FunctionTool( + function=FunctionDefinition( + name="menu-get_specials", + description="Provides a list of specials from the menu.", + parameters={"type": "object", "properties": 
{}, "required": []}, + strict=False, + ), + type="function", + ), + ], + truncation_strategy=TruncationStrategy(type="auto", last_messages=None), + usage=None, + temperature=1.0, + top_p=1.0, + tool_resources={"code_interpreter": {"file_ids": []}}, + ), + event="thread.run.requires_action", + ) + + +def mock_thread_run_step_completed(): + return ThreadRunStepCompleted( + data=RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=MessageCreationStepDetails( + type="message_creation", message_creation=MessageCreation(message_id="test") + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), + ), + event="thread.run.step.completed", + ) + + +def mock_thread_run_step_completed_with_code(): + return ThreadRunStepCompleted( + data=RunStep( + id="step_id_2", + type="message_creation", + completed_at=int(datetime.now(timezone.utc).timestamp()), + created_at=int((datetime.now(timezone.utc) - timedelta(minutes=2)).timestamp()), + step_details=ToolCallsStepDetails( + type="tool_calls", + tool_calls=[ + CodeInterpreterToolCall( + id="tool_call_id", + code_interpreter=CodeInterpreter(input="test code", outputs=[]), + type="code_interpreter", + ) + ], + ), + assistant_id="assistant_id", + object="thread.run.step", + run_id="run_id", + status="completed", + thread_id="thread_id", + usage=Usage(completion_tokens=10, prompt_tokens=5, total_tokens=15), + ), + event="thread.run.step.completed", + ) + + +def mock_run_with_last_error(): + return ThreadRunFailed( + data=Run( + id="run_00OwjJnEg2SGJy8sky7ip35P", + assistant_id="asst_wMMAX5F59szE7YHrCKSSgJlE", + cancelled_at=None, + completed_at=None, + created_at=1727798684, + expires_at=1727799284, + failed_at=None, + 
incomplete_details=None, + instructions="Answer questions about the menu.", + last_error=LastError(code="server_error", message="Server error"), + max_completion_tokens=None, + max_prompt_tokens=None, + metadata={}, + model="gpt-4o-2024-08-06", + object="thread.run", + parallel_tool_calls=True, + required_action=None, + response_format="auto", + started_at=1727798685, + status="failed", + thread_id="thread_jR4ZLlUwSrPcsLfdnGyFxi4Z", + tool_choice="auto", + tools=[], + truncation_strategy=TruncationStrategy(type="auto", last_messages=None), + usage=None, + temperature=1.0, + top_p=1.0, + tool_resources={"code_interpreter": {"file_ids": []}}, + ), + event="thread.run.failed", + ) + + +class MockAsyncIterable: + def __init__(self, items): + self.items = items.copy() + + def __aiter__(self): + self._iter = iter(self.items) + return self + + async def __anext__(self): + try: + return next(self._iter) + except StopIteration: + raise StopAsyncIteration + + +class MockStream: + def __init__(self, events): + self.events = events + + async def __aenter__(self): + return MockAsyncIterable(self.events) + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + # endregion # region Tests @@ -810,9 +1063,9 @@ async def test_add_chat_message( async def test_add_chat_message_invalid_role( azure_openai_assistant_agent, mock_chat_message_content, openai_unit_test_env ): - mock_chat_message_content.role = AuthorRole.TOOL + mock_chat_message_content.role = AuthorRole.SYSTEM - with pytest.raises(AgentExecutionException, match="Invalid message role `tool`"): + with pytest.raises(AgentExecutionException, match="Invalid message role `system`"): await azure_openai_assistant_agent.add_chat_message("test_thread_id", mock_chat_message_content) @@ -900,6 +1153,121 @@ def mock_get_function_call_contents(run, function_steps): _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] +@pytest.mark.asyncio +async def test_invoke_stream( + 
azure_openai_assistant_agent, + mock_assistant, + mock_thread_messages, + azure_openai_unit_test_env, +): + events = [ + MockEvent("thread.run.created", MockRunData(id="run_1", status="queued")), + MockEvent("thread.run.in_progress", MockRunData(id="run_1", status="in_progress")), + create_thread_message_delta_mock(), + mock_thread_run_step_completed(), + MockEvent("thread.run.completed", MockRunData(id="run_1", status="completed")), + mock_thread_requires_action_run(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + + assert len(messages) > 0 + + +@pytest.mark.asyncio +async def test_invoke_stream_code_output( + azure_openai_assistant_agent, + mock_assistant, + azure_openai_unit_test_env, +): + events = [mock_thread_run_step_completed_with_code()] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + azure_openai_assistant_agent.assistant = await 
azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + assert content.metadata.get("code") is True + + +@pytest.mark.asyncio +async def test_invoke_stream_requires_action( + azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env +): + events = [ + mock_thread_requires_action_run(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + messages = [] + async for content in azure_openai_assistant_agent.invoke_stream("thread_id", messages=messages): + assert content is not None + + assert len(messages) > 0 + + +@pytest.mark.asyncio +async def test_invoke_stream_throws_exception( + azure_openai_assistant_agent, mock_assistant, mock_thread_messages, azure_openai_unit_test_env +): + events = [ + mock_run_with_last_error(), + ] + + with patch.object(azure_openai_assistant_agent, "client", spec=AsyncAzureOpenAI) as mock_client: + mock_client.beta = MagicMock() + mock_client.beta.threads = MagicMock() + mock_client.beta.assistants = MagicMock() + mock_client.beta.assistants.create = AsyncMock(return_value=mock_assistant) + + mock_client.beta.threads.runs = MagicMock() + mock_client.beta.threads.runs.stream = MagicMock(return_value=MockStream(events)) + + mock_client.beta.threads.messages.retrieve = 
AsyncMock(side_effect=mock_thread_messages) + + azure_openai_assistant_agent.assistant = await azure_openai_assistant_agent.create_assistant() + + with pytest.raises(AgentInvokeException): + async for _ in azure_openai_assistant_agent.invoke_stream("thread_id"): + pass + + @pytest.mark.asyncio async def test_invoke_assistant_not_initialized_throws(azure_openai_assistant_agent, openai_unit_test_env): with pytest.raises(AgentInitializationException, match="The assistant has not been created."): @@ -957,6 +1325,20 @@ async def mock_poll_run_status(run, thread_id): _ = [message async for message in azure_openai_assistant_agent.invoke("thread_id")] +@pytest.fixture +def mock_streaming_assistant_stream_manager() -> AsyncAssistantStreamManager[AsyncAssistantEventHandler]: + assistant_event_handler = AsyncAssistantEventHandler() + + mock_stream = AsyncMock() + mock_stream.__aiter__.return_value = [assistant_event_handler] + + mock_manager = AsyncMock(spec=AsyncAssistantStreamManager) + mock_manager.__aenter__.return_value = mock_stream + mock_manager.__aexit__.return_value = None + + return mock_manager + + def test_format_tool_outputs(azure_openai_assistant_agent, openai_unit_test_env): chat_history = ChatHistory() fcc = FunctionCallContent( @@ -965,8 +1347,8 @@ def test_format_tool_outputs(azure_openai_assistant_agent, openai_unit_test_env) frc = FunctionResultContent.from_function_call_content_and_result(fcc, 123, {"test2": "test2"}) chat_history.add_message(message=frc.to_chat_message_content()) - tool_outputs = azure_openai_assistant_agent._format_tool_outputs(chat_history) - assert tool_outputs[0] == {"tool_call_id": "test", "output": 123} + tool_outputs = azure_openai_assistant_agent._format_tool_outputs([fcc], chat_history) + assert tool_outputs[0] == {"tool_call_id": "test", "output": "123"} @pytest.mark.asyncio diff --git a/python/tests/unit/agents/test_open_ai_assistant_channel.py b/python/tests/unit/agents/test_open_ai_assistant_channel.py index 
899ae3799b2c..7f45e88e4de4 100644 --- a/python/tests/unit/agents/test_open_ai_assistant_channel.py +++ b/python/tests/unit/agents/test_open_ai_assistant_channel.py @@ -13,8 +13,10 @@ from openai.types.beta.threads.text import Text from openai.types.beta.threads.text_content_block import TextContentBlock +from semantic_kernel.agents.chat_completion.chat_completion_agent import ChatCompletionAgent from semantic_kernel.agents.open_ai.open_ai_assistant_base import OpenAIAssistantBase from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.function_call_content import FunctionCallContent from semantic_kernel.contents.text_content import TextContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentChatException @@ -137,6 +139,64 @@ async def mock_invoke_internal(*args, **kwargs): assert isinstance(message, ChatMessageContent) +@pytest.mark.asyncio +async def test_invoke_agent_invalid_instance_throws(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=ChatCompletionAgent) + agent._is_deleted = False + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): + async for _, _ in channel.invoke(agent): + pass + + +@pytest.mark.asyncio +async def test_invoke_streaming_agent(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted = False + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + results = [] + + async def mock_invoke_internal(*args, **kwargs): + for _ in range(3): + msg = 
MagicMock(spec=ChatMessageContent) + yield msg + results.append(msg) + + agent._invoke_internal_stream.side_effect = mock_invoke_internal + + async for message in channel.invoke_stream(agent, results): + assert message is not None + + assert len(results) == 3 + for message in results: + assert isinstance(message, ChatMessageContent) + + +@pytest.mark.asyncio +async def test_invoke_streaming_agent_invalid_instance_throws(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=ChatCompletionAgent) + agent._is_deleted = False + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + with pytest.raises(AgentChatException, match=f"Agent is not of the expected type {type(OpenAIAssistantBase)}."): + async for _ in channel.invoke_stream(agent, []): + pass + + @pytest.mark.asyncio async def test_invoke_agent_deleted(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel @@ -152,6 +212,21 @@ async def test_invoke_agent_deleted(): pass +@pytest.mark.asyncio +async def test_invoke_streaming_agent_deleted(): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + client = MagicMock(spec=AsyncOpenAI) + thread_id = "test_thread" + agent = MagicMock(spec=OpenAIAssistantBase) + agent._is_deleted = True + channel = OpenAIAssistantChannel(client=client, thread_id=thread_id) + + with pytest.raises(AgentChatException, match="Agent is deleted"): + async for _ in channel.invoke_stream(agent, []): + pass + + @pytest.mark.asyncio async def test_invoke_agent_wrong_type(): from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel @@ -260,3 +335,16 @@ async def mock_retrieve_assistant(*args, **kwargs) -> Any: with pytest.raises(Exception, match="Test error"): await channel.reset() + + +@pytest.mark.asyncio +async def 
test_channel_receive_fcc_skipped(openai_unit_test_env): + from semantic_kernel.agents.channels.open_ai_assistant_channel import OpenAIAssistantChannel + + message = ChatMessageContent(role=AuthorRole.ASSISTANT, items=[FunctionCallContent(function_name="test_function")]) + + client = MagicMock(spec=AsyncOpenAI) + + channel = OpenAIAssistantChannel(client=client, thread_id="test_thread") + + await channel.receive([message]) diff --git a/python/tests/unit/connectors/anthropic/services/test_anthropic_chat_completion.py b/python/tests/unit/connectors/anthropic/services/test_anthropic_chat_completion.py index 11c7882b49df..072612c5d5e6 100644 --- a/python/tests/unit/connectors/anthropic/services/test_anthropic_chat_completion.py +++ b/python/tests/unit/connectors/anthropic/services/test_anthropic_chat_completion.py @@ -1,68 +1,435 @@ # Copyright (c) Microsoft. All rights reserved. -from unittest.mock import AsyncMock, MagicMock +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch import pytest from anthropic import AsyncAnthropic +from anthropic.lib.streaming import TextEvent +from anthropic.lib.streaming._types import InputJsonEvent +from anthropic.types import ( + ContentBlockStopEvent, + InputJSONDelta, + Message, + MessageDeltaUsage, + MessageStopEvent, + RawContentBlockDeltaEvent, + RawContentBlockStartEvent, + RawMessageDeltaEvent, + RawMessageStartEvent, + TextBlock, + TextDelta, + ToolUseBlock, + Usage, +) +from anthropic.types.raw_message_delta_event import Delta from semantic_kernel.connectors.ai.anthropic.prompt_execution_settings.anthropic_prompt_execution_settings import ( AnthropicChatPromptExecutionSettings, ) from semantic_kernel.connectors.ai.anthropic.services.anthropic_chat_completion import AnthropicChatCompletion from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from 
semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, ) -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.exceptions import ServiceInitializationError, ServiceResponseException +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ( + ChatMessageContent, + FunctionCallContent, + FunctionResultContent, + TextContent, +) +from semantic_kernel.contents.const import ContentTypes +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent, StreamingTextContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.contents.utils.finish_reason import FinishReason +from semantic_kernel.exceptions import ( + ServiceInitializationError, + ServiceInvalidExecutionSettingsError, + ServiceResponseException, +) +from semantic_kernel.functions.function_result import FunctionResult from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function +from semantic_kernel.functions.kernel_function_from_method import KernelFunctionMetadata +from semantic_kernel.functions.kernel_parameter_metadata import KernelParameterMetadata from semantic_kernel.kernel import Kernel +@pytest.fixture +def mock_tool_calls_message() -> ChatMessageContent: + return ChatMessageContent( + inner_content=Message( + id="test_message_id", + content=[ + TextBlock(text="", type="text"), + ToolUseBlock( + id="test_tool_use_blocks", + input={"input": 3, "amount": 3}, + name="math-Add", + type="tool_use", + ), + ], + model="claude-3-opus-20240229", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + type="message", + usage=Usage(input_tokens=1720, output_tokens=194), + ), + ai_model_id="claude-3-opus-20240229", + metadata={}, + 
content_type="message", + role=AuthorRole.ASSISTANT, + name=None, + items=[ + FunctionCallContent( + inner_content=None, + ai_model_id=None, + metadata={}, + content_type=ContentTypes.FUNCTION_CALL_CONTENT, + id="test_function_call_content", + index=1, + name="math-Add", + function_name="Add", + plugin_name="math", + arguments={"input": 3, "amount": 3}, + ), + TextContent( + inner_content=None, + ai_model_id=None, + metadata={}, + content_type="text", + text="", + encoding=None, + ), + ], + encoding=None, + finish_reason=FinishReason.TOOL_CALLS, + ) + + +@pytest.fixture +def mock_streaming_tool_calls_message() -> list: + stream_events = [ + RawMessageStartEvent( + message=Message( + id="test_message_id", + content=[], + model="claude-3-opus-20240229", + role="assistant", + stop_reason=None, + stop_sequence=None, + type="message", + usage=Usage(input_tokens=1720, output_tokens=2), + ), + type="message_start", + ), + RawContentBlockStartEvent(content_block=TextBlock(text="", type="text"), index=0, type="content_block_start"), + RawContentBlockDeltaEvent( + delta=TextDelta(text="", type="text_delta"), index=0, type="content_block_delta" + ), + TextEvent(type="text", text="", snapshot=""), + RawContentBlockDeltaEvent( + delta=TextDelta(text="", type="text_delta"), index=0, type="content_block_delta" + ), + TextEvent(type="text", text="", snapshot=""), + ContentBlockStopEvent( + index=0, type="content_block_stop", content_block=TextBlock(text="", type="text") + ), + RawContentBlockStartEvent( + content_block=ToolUseBlock(id="test_tool_use_message_id", input={}, name="math-Add", type="tool_use"), + index=1, + type="content_block_start", + ), + RawContentBlockDeltaEvent( + delta=InputJSONDelta(partial_json='{"input": 3, "amount": 3}', type="input_json_delta"), + index=1, + type="content_block_delta", + ), + InputJsonEvent(type="input_json", partial_json='{"input": 3, "amount": 3}', snapshot={"input": 3, "amount": 3}), + ContentBlockStopEvent( + index=1, + 
type="content_block_stop", + content_block=ToolUseBlock( + id="test_tool_use_block_id", input={"input": 3, "amount": 3}, name="math-Add", type="tool_use" + ), + ), + RawMessageDeltaEvent( + delta=Delta(stop_reason="tool_use", stop_sequence=None), + type="message_delta", + usage=MessageDeltaUsage(output_tokens=159), + ), + MessageStopEvent( + type="message_stop", + message=Message( + id="test_message_id", + content=[ + TextBlock(text="", type="text"), + ToolUseBlock( + id="test_tool_use_block_id", input={"input": 3, "amount": 3}, name="math-Add", type="tool_use" + ), + ], + model="claude-3-opus-20240229", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + type="message", + usage=Usage(input_tokens=100, output_tokens=100), + ), + ), + ] + + async def async_generator(): + for event in stream_events: + yield event + + stream_mock = AsyncMock() + stream_mock.__aenter__.return_value = async_generator() + + return stream_mock + + +@pytest.fixture +def mock_tool_call_result_message() -> ChatMessageContent: + return ChatMessageContent( + inner_content=None, + ai_model_id=None, + metadata={}, + content_type="message", + role=AuthorRole.TOOL, + name=None, + items=[ + FunctionResultContent( + id="tool_01", + inner_content=FunctionResult( + function=KernelFunctionMetadata( + name="Add", + plugin_name="math", + description="Returns the Addition result of the values provided.", + parameters=[ + KernelParameterMetadata( + name="input", + description="the first number to add", + default_value=None, + type_="int", + is_required=True, + type_object=int, + schema_data={"type": "integer", "description": "the first number to add"}, + function_schema_include=True, + ), + KernelParameterMetadata( + name="amount", + description="the second number to add", + default_value=None, + type_="int", + is_required=True, + type_object=int, + schema_data={"type": "integer", "description": "the second number to add"}, + function_schema_include=True, + ), + ], + is_prompt=False, + 
is_asynchronous=False, + return_parameter=KernelParameterMetadata( + name="return", + description="the output is a number", + default_value=None, + type_="int", + is_required=True, + type_object=int, + schema_data={"type": "integer", "description": "the output is a number"}, + function_schema_include=True, + ), + additional_properties={}, + ), + value=6, + metadata={}, + ), + value=6, + ) + ], + encoding=None, + finish_reason=FinishReason.TOOL_CALLS, + ) + + +# mock StreamingChatMessageContent +@pytest.fixture +def mock_streaming_chat_message_content() -> StreamingChatMessageContent: + return StreamingChatMessageContent( + choice_index=0, + inner_content=[ + RawContentBlockDeltaEvent( + delta=TextDelta(text="", type="text_delta"), index=0, type="content_block_delta" + ), + RawContentBlockDeltaEvent( + delta=TextDelta(text="", type="text_delta"), index=0, type="content_block_delta" + ), + ContentBlockStopEvent( + index=1, + type="content_block_stop", + content_block=ToolUseBlock( + id="tool_id", + input={"input": 3, "amount": 3}, + name="math-Add", + type="tool_use", + ), + ), + RawMessageDeltaEvent( + delta=Delta(stop_reason="tool_use", stop_sequence=None), + type="message_delta", + usage=MessageDeltaUsage(output_tokens=175), + ), + ], + ai_model_id="claude-3-opus-20240229", + metadata={}, + role=AuthorRole.ASSISTANT, + name=None, + items=[ + StreamingTextContent( + inner_content=None, + ai_model_id=None, + metadata={}, + content_type="text", + text="", + encoding=None, + choice_index=0, + ), + FunctionCallContent( + inner_content=None, + ai_model_id=None, + metadata={}, + content_type=ContentTypes.FUNCTION_CALL_CONTENT, + id="tool_id", + index=0, + name="math-Add", + function_name="Add", + plugin_name="math", + arguments='{"input": 3, "amount": 3}', + ), + ], + encoding=None, + finish_reason=FinishReason.TOOL_CALLS, + ) + + @pytest.fixture def mock_settings() -> AnthropicChatPromptExecutionSettings: return AnthropicChatPromptExecutionSettings() @pytest.fixture 
-def mock_anthropic_client_completion() -> AsyncAnthropic: - client = MagicMock(spec=AsyncAnthropic) - chat_completion_response = AsyncMock() - - content = [MagicMock(finish_reason="stop", message=MagicMock(role="assistant", content="Test"))] - chat_completion_response.content = content +def mock_chat_message_response() -> Message: + return Message( + id="test_message_id", + content=[TextBlock(text="Hello, how are you?", type="text")], + model="claude-3-opus-20240229", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + type="message", + usage=Usage(input_tokens=10, output_tokens=10), + ) - # Create a MagicMock for the messages attribute - messages_mock = MagicMock() - messages_mock.create = chat_completion_response - # Assign the messages_mock to the client.messages attribute - client.messages = messages_mock - return client +@pytest.fixture +def mock_streaming_message_response() -> AsyncGenerator: + raw_message_start_event = RawMessageStartEvent( + message=Message( + id="test_message_id", + content=[], + model="claude-3-opus-20240229", + role="assistant", + stop_reason=None, + stop_sequence=None, + type="message", + usage=Usage(input_tokens=41, output_tokens=3), + ), + type="message_start", + ) + raw_content_block_start_event = RawContentBlockStartEvent( + content_block=TextBlock(text="", type="text"), + index=0, + type="content_block_start", + ) -@pytest.fixture -def mock_anthropic_client_completion_stream() -> AsyncAnthropic: - client = MagicMock(spec=AsyncAnthropic) - chat_completion_response = MagicMock() + raw_content_block_delta_event = RawContentBlockDeltaEvent( + delta=TextDelta(text="Hello! It", type="text_delta"), + index=0, + type="content_block_delta", + ) + + text_event = TextEvent( + type="text", + text="Hello! It", + snapshot="Hello! 
It", + ) - content = [ - MagicMock(finish_reason="stop", delta=MagicMock(role="assistant", content="Test")), - MagicMock(finish_reason="stop", delta=MagicMock(role="assistant", content="Test", tool_calls=None)), + content_block_stop_event = ContentBlockStopEvent( + index=0, + type="content_block_stop", + content_block=TextBlock(text="Hello! It's nice to meet you.", type="text"), + ) + + raw_message_delta_event = RawMessageDeltaEvent( + delta=Delta(stop_reason="end_turn", stop_sequence=None), + type="message_delta", + usage=MessageDeltaUsage(output_tokens=84), + ) + + message_stop_event = MessageStopEvent( + type="message_stop", + message=Message( + id="test_message_stop_id", + content=[TextBlock(text="Hello! It's nice to meet you.", type="text")], + model="claude-3-opus-20240229", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + type="message", + usage=Usage(input_tokens=41, output_tokens=84), + ), + ) + + # Combine all mock events into a list + stream_events = [ + raw_message_start_event, + raw_content_block_start_event, + raw_content_block_delta_event, + text_event, + content_block_stop_event, + raw_message_delta_event, + message_stop_event, ] - chat_completion_response.content = content - chat_completion_response_empty = MagicMock() - chat_completion_response_empty.content = [] + async def async_generator(): + for event in stream_events: + yield event + + # Create an AsyncMock for the stream + stream_mock = AsyncMock() + stream_mock.__aenter__.return_value = async_generator() + + return stream_mock + + +@pytest.fixture +def mock_anthropic_client_completion(mock_chat_message_response: Message) -> AsyncAnthropic: + client = MagicMock(spec=AsyncAnthropic) + messages_mock = MagicMock() + messages_mock.create = AsyncMock(return_value=mock_chat_message_response) + client.messages = messages_mock + return client + - # Create a MagicMock for the messages attribute +@pytest.fixture +def 
mock_anthropic_client_completion_stream(mock_streaming_message_response: AsyncGenerator) -> AsyncAnthropic: + client = MagicMock(spec=AsyncAnthropic) messages_mock = MagicMock() - messages_mock.stream = chat_completion_response - - generator_mock = MagicMock() - generator_mock.__aiter__.return_value = [chat_completion_response_empty, chat_completion_response] - + messages_mock.stream.return_value = mock_streaming_message_response client.messages = messages_mock - return client @@ -70,34 +437,127 @@ def mock_anthropic_client_completion_stream() -> AsyncAnthropic: async def test_complete_chat_contents( kernel: Kernel, mock_settings: AnthropicChatPromptExecutionSettings, - mock_anthropic_client_completion: AsyncAnthropic, + mock_chat_message_response: Message, ): - chat_history = MagicMock() + client = MagicMock(spec=AsyncAnthropic) + messages_mock = MagicMock() + messages_mock.create = AsyncMock(return_value=mock_chat_message_response) + client.messages = messages_mock + + chat_history = ChatHistory() + chat_history.add_user_message("test_user_message") + arguments = KernelArguments() chat_completion_base = AnthropicChatCompletion( - ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_anthropic_client_completion + ai_model_id="test_model_id", service_id="test", api_key="", async_client=client ) content: list[ChatMessageContent] = await chat_completion_base.get_chat_message_contents( chat_history=chat_history, settings=mock_settings, kernel=kernel, arguments=arguments ) - assert content is not None + + assert len(content) > 0 + assert content[0].content != "" + assert content[0].role == AuthorRole.ASSISTANT +mock_message_text_content = ChatMessageContent(role=AuthorRole.ASSISTANT, items=[TextContent(text="test")]) + +mock_message_function_call = ChatMessageContent( + role=AuthorRole.ASSISTANT, + items=[ + FunctionCallContent( + name="test", + arguments={"key": "test"}, + ) + ], +) + + +@pytest.mark.parametrize( + 
"function_choice_behavior,model_responses,expected_result", + [ + pytest.param( + FunctionChoiceBehavior.Auto(), + [[mock_message_function_call], [mock_message_text_content]], + TextContent, + id="auto", + ), + pytest.param( + FunctionChoiceBehavior.Auto(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="auto_none_invoke", + ), + pytest.param( + FunctionChoiceBehavior.Required(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="required_none_invoke", + ), + ], +) @pytest.mark.asyncio -async def test_complete_chat_stream_contents( +async def test_complete_chat_contents_function_call_behavior_tool_call( kernel: Kernel, mock_settings: AnthropicChatPromptExecutionSettings, - mock_anthropic_client_completion_stream: AsyncAnthropic, + function_choice_behavior: FunctionChoiceBehavior, + model_responses, + expected_result, +): + kernel.add_function("test", kernel_function(lambda key: "test", name="test")) + mock_settings.function_choice_behavior = function_choice_behavior + + arguments = KernelArguments() + chat_completion_base = AnthropicChatCompletion(ai_model_id="test_model_id", service_id="test", api_key="") + + with ( + patch.object(chat_completion_base, "_inner_get_chat_message_contents", side_effect=model_responses), + ): + response: list[ChatMessageContent] = await chat_completion_base.get_chat_message_contents( + chat_history=ChatHistory(system_message="Test"), settings=mock_settings, kernel=kernel, arguments=arguments + ) + + assert all(isinstance(content, expected_result) for content in response[0].items) + + +@pytest.mark.asyncio +async def test_complete_chat_contents_function_call_behavior_without_kernel( + mock_settings: AnthropicChatPromptExecutionSettings, + mock_anthropic_client_completion: AsyncAnthropic, ): chat_history = MagicMock() + chat_completion_base = AnthropicChatCompletion( + ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_anthropic_client_completion + ) 
+ + mock_settings.function_choice_behavior = FunctionChoiceBehavior.Auto() + + with pytest.raises(ServiceInvalidExecutionSettingsError): + await chat_completion_base.get_chat_message_contents(chat_history=chat_history, settings=mock_settings) + + +@pytest.mark.asyncio +async def test_complete_chat_stream_contents( + kernel: Kernel, + mock_settings: AnthropicChatPromptExecutionSettings, + mock_streaming_message_response, +): + client = MagicMock(spec=AsyncAnthropic) + messages_mock = MagicMock() + messages_mock.stream.return_value = mock_streaming_message_response + client.messages = messages_mock + + chat_history = ChatHistory() + chat_history.add_user_message("test_user_message") + arguments = KernelArguments() chat_completion_base = AnthropicChatCompletion( ai_model_id="test_model_id", service_id="test", api_key="", - async_client=mock_anthropic_client_completion_stream, + async_client=client, ) async for content in chat_completion_base.get_streaming_chat_message_contents( @@ -106,19 +566,80 @@ async def test_complete_chat_stream_contents( assert content is not None +mock_message_function_call = StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, items=[FunctionCallContent(name="test")], choice_index="0" +) + +mock_message_text_content = StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, items=[TextContent(text="test")], choice_index="0" +) + + +@pytest.mark.parametrize( + "function_choice_behavior,model_responses,expected_result", + [ + pytest.param( + FunctionChoiceBehavior.Auto(), + [[mock_message_function_call], [mock_message_text_content]], + TextContent, + id="auto", + ), + pytest.param( + FunctionChoiceBehavior.Auto(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="auto_none_invoke", + ), + pytest.param( + FunctionChoiceBehavior.Required(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="required_none_invoke", + ), + pytest.param(FunctionChoiceBehavior.NoneInvoke(), 
[[mock_message_text_content]], TextContent, id="none"), + ], +) @pytest.mark.asyncio -async def test_anthropic_sdk_exception(kernel: Kernel, mock_settings: AnthropicChatPromptExecutionSettings): - chat_history = MagicMock() +async def test_complete_chat_contents_streaming_function_call_behavior_tool_call( + kernel: Kernel, + mock_settings: AnthropicChatPromptExecutionSettings, + function_choice_behavior: FunctionChoiceBehavior, + model_responses, + expected_result, +): + mock_settings.function_choice_behavior = function_choice_behavior + + # Mock sequence of model responses + generator_mocks = [] + for mock_message in model_responses: + generator_mock = MagicMock() + generator_mock.__aiter__.return_value = [mock_message] + generator_mocks.append(generator_mock) + arguments = KernelArguments() - client = MagicMock(spec=AsyncAnthropic) + chat_completion_base = AnthropicChatCompletion(ai_model_id="test_model_id", service_id="test", api_key="") + + with patch.object(chat_completion_base, "_inner_get_streaming_chat_message_contents", side_effect=generator_mocks): + messages = [] + async for chunk in chat_completion_base.get_streaming_chat_message_contents( + chat_history=ChatHistory(system_message="Test"), settings=mock_settings, kernel=kernel, arguments=arguments + ): + messages.append(chunk) - # Create a MagicMock for the messages attribute + response = messages[-1] + assert all(isinstance(content, expected_result) for content in response[0].items) + + +@pytest.mark.asyncio +async def test_anthropic_sdk_exception(kernel: Kernel, mock_settings: AnthropicChatPromptExecutionSettings): + client = MagicMock(spec=AsyncAnthropic) messages_mock = MagicMock() messages_mock.create.side_effect = Exception("Test Exception") - - # Assign the messages_mock to the client.messages attribute client.messages = messages_mock + chat_history = MagicMock() + arguments = KernelArguments() + chat_completion_base = AnthropicChatCompletion( ai_model_id="test_model_id", service_id="test", 
api_key="", async_client=client ) @@ -131,16 +652,14 @@ async def test_anthropic_sdk_exception(kernel: Kernel, mock_settings: AnthropicC @pytest.mark.asyncio async def test_anthropic_sdk_exception_streaming(kernel: Kernel, mock_settings: AnthropicChatPromptExecutionSettings): - chat_history = MagicMock() - arguments = KernelArguments() client = MagicMock(spec=AsyncAnthropic) - - # Create a MagicMock for the messages attribute messages_mock = MagicMock() messages_mock.stream.side_effect = Exception("Test Exception") - client.messages = messages_mock + chat_history = MagicMock() + arguments = KernelArguments() + chat_completion_base = AnthropicChatCompletion( ai_model_id="test_model_id", service_id="test", api_key="", async_client=client ) @@ -218,5 +737,144 @@ async def test_with_different_execution_settings_stream( async for chunk in chat_completion_base.get_streaming_chat_message_contents( chat_history, settings, kernel=kernel, arguments=arguments ): - continue + assert chunk is not None assert mock_anthropic_client_completion_stream.messages.stream.call_args.kwargs["temperature"] == 0.2 + + +@pytest.mark.asyncio +async def test_prepare_chat_history_for_request_with_system_message(mock_anthropic_client_completion_stream: MagicMock): + chat_history = ChatHistory() + chat_history.add_system_message("System message") + chat_history.add_user_message("User message") + chat_history.add_assistant_message("Assistant message") + chat_history.add_system_message("Another system message") + + chat_completion_base = AnthropicChatCompletion( + ai_model_id="test_model_id", + service_id="test", + api_key="", + async_client=mock_anthropic_client_completion_stream, + ) + + remaining_messages, system_message_content = chat_completion_base._prepare_chat_history_for_request( + chat_history, role_key="role", content_key="content" + ) + + assert system_message_content == "System message" + assert remaining_messages == [ + {"role": AuthorRole.USER, "content": "User message"}, + {"role": 
AuthorRole.ASSISTANT, "content": "Assistant message"}, + ] + assert not any(msg["role"] == AuthorRole.SYSTEM for msg in remaining_messages) + + +@pytest.mark.asyncio +async def test_prepare_chat_history_for_request_with_tool_message( + mock_anthropic_client_completion_stream: MagicMock, + mock_tool_calls_message: ChatMessageContent, + mock_tool_call_result_message: ChatMessageContent, +): + chat_history = ChatHistory() + chat_history.add_user_message("What is 3+3?") + chat_history.add_message(mock_tool_calls_message) + chat_history.add_message(mock_tool_call_result_message) + + chat_completion_client = AnthropicChatCompletion( + ai_model_id="test_model_id", + service_id="test", + api_key="", + async_client=mock_anthropic_client_completion_stream, + ) + + remaining_messages, system_message_content = chat_completion_client._prepare_chat_history_for_request( + chat_history, role_key="role", content_key="content" + ) + + assert system_message_content is None + assert len(remaining_messages) == 3 + + +@pytest.mark.asyncio +async def test_prepare_chat_history_for_request_with_tool_message_streaming( + mock_anthropic_client_completion_stream: MagicMock, + mock_streaming_chat_message_content: StreamingChatMessageContent, + mock_tool_call_result_message: ChatMessageContent, +): + chat_history = ChatHistory() + chat_history.add_user_message("What is 3+3?") + chat_history.add_message(mock_streaming_chat_message_content) + chat_history.add_message(mock_tool_call_result_message) + + chat_completion = AnthropicChatCompletion( + ai_model_id="test_model_id", + service_id="test", + api_key="", + async_client=mock_anthropic_client_completion_stream, + ) + + remaining_messages, system_message_content = chat_completion._prepare_chat_history_for_request( + chat_history, + role_key="role", + content_key="content", + stream=True, + ) + + assert system_message_content is None + assert len(remaining_messages) == 3 + + +@pytest.mark.asyncio +async def 
test_send_chat_stream_request_tool_calls( + mock_streaming_tool_calls_message: MagicMock, + mock_streaming_chat_message_content: StreamingChatMessageContent, +): + chat_history = ChatHistory() + chat_history.add_user_message("What is 3+3?") + chat_history.add_message(mock_streaming_chat_message_content) + + settings = AnthropicChatPromptExecutionSettings( + temperature=0.2, + max_tokens=100, + top_p=1.0, + frequency_penalty=0.0, + presence_penalty=0.0, + chat_history=chat_history, + ) + + client = MagicMock(spec=AsyncAnthropic) + messages_mock = MagicMock() + messages_mock.stream.return_value = mock_streaming_tool_calls_message + client.messages = messages_mock + + chat_completion = AnthropicChatCompletion( + ai_model_id="test_model_id", + service_id="test", + api_key="", + async_client=client, + ) + + response = chat_completion._send_chat_stream_request(settings) + async for message in response: + assert message is not None + + +def test_client_base_url(mock_anthropic_client_completion: MagicMock): + chat_completion_base = AnthropicChatCompletion( + ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_anthropic_client_completion + ) + + assert chat_completion_base.service_url() is not None + + +def test_chat_completion_reset_settings( + mock_anthropic_client_completion: MagicMock, +): + chat_completion = AnthropicChatCompletion( + ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_anthropic_client_completion + ) + + settings = AnthropicChatPromptExecutionSettings(tools=[{"name": "test"}], tool_choice={"type": "any"}) + chat_completion._reset_function_choice_settings(settings) + + assert settings.tools is None + assert settings.tool_choice is None diff --git a/python/tests/unit/connectors/anthropic/test_anthropic_request_settings.py b/python/tests/unit/connectors/anthropic/test_anthropic_request_settings.py index 2cfc2327e655..531823281ae2 100644 --- 
a/python/tests/unit/connectors/anthropic/test_anthropic_request_settings.py +++ b/python/tests/unit/connectors/anthropic/test_anthropic_request_settings.py @@ -1,17 +1,20 @@ # Copyright (c) Microsoft. All rights reserved. + import pytest from semantic_kernel.connectors.ai.anthropic.prompt_execution_settings.anthropic_prompt_execution_settings import ( AnthropicChatPromptExecutionSettings, ) +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.exceptions import ServiceInvalidExecutionSettingsError def test_default_anthropic_chat_prompt_execution_settings(): settings = AnthropicChatPromptExecutionSettings() assert settings.temperature is None assert settings.top_p is None - assert settings.max_tokens is None + assert settings.max_tokens == 1024 assert settings.messages is None @@ -34,7 +37,7 @@ def test_anthropic_chat_prompt_execution_settings_from_default_completion_config assert chat_settings.service_id == "test_service" assert chat_settings.temperature is None assert chat_settings.top_p is None - assert chat_settings.max_tokens is None + assert chat_settings.max_tokens == 1024 def test_anthropic_chat_prompt_execution_settings_from_openai_prompt_execution_settings(): @@ -111,16 +114,16 @@ def test_create_options(): assert options["max_tokens"] == 128 -def test_create_options_with_function_choice_behavior(): - with pytest.raises(NotImplementedError): +def test_tool_choice_none(): + with pytest.raises(ServiceInvalidExecutionSettingsError, match="Tool choice 'none' is not supported by Anthropic."): AnthropicChatPromptExecutionSettings( service_id="test_service", - function_choice_behavior="auto", extension_data={ "temperature": 0.5, "top_p": 0.5, "max_tokens": 128, - "tools": [{}], + "tool_choice": {"type": "none"}, "messages": [{"role": "system", "content": "Hello"}], }, + 
function_choice_behavior=FunctionChoiceBehavior.NoneInvoke(), ) diff --git a/python/tests/unit/connectors/google/google_ai/services/test_google_ai_chat_completion.py b/python/tests/unit/connectors/google/google_ai/services/test_google_ai_chat_completion.py index bc24e98d43c0..f60640b9472c 100644 --- a/python/tests/unit/connectors/google/google_ai/services/test_google_ai_chat_completion.py +++ b/python/tests/unit/connectors/google/google_ai/services/test_google_ai_chat_completion.py @@ -189,10 +189,6 @@ async def test_google_ai_chat_completion_with_function_choice_behavior_no_tool_c kernel=kernel, ) - # Remove the latest message since the response from the model will be added to the chat history - # even when the model doesn't return a tool call - chat_history.remove_message(chat_history[-1]) - mock_google_ai_model_generate_content_async.assert_awaited_once_with( contents=google_ai_chat_completion._prepare_chat_history_for_request(chat_history), generation_config=GenerationConfig(**settings.prepare_settings_dict()), diff --git a/python/tests/unit/connectors/google/vertex_ai/services/test_vertex_ai_chat_completion.py b/python/tests/unit/connectors/google/vertex_ai/services/test_vertex_ai_chat_completion.py index 7bed2ae9e776..99ce39a455f7 100644 --- a/python/tests/unit/connectors/google/vertex_ai/services/test_vertex_ai_chat_completion.py +++ b/python/tests/unit/connectors/google/vertex_ai/services/test_vertex_ai_chat_completion.py @@ -189,10 +189,6 @@ async def test_vertex_ai_chat_completion_with_function_choice_behavior_no_tool_c kernel=kernel, ) - # Remove the latest message since the response from the model will be added to the chat history - # even when the model doesn't return a tool call - chat_history.remove_message(chat_history[-1]) - mock_vertex_ai_model_generate_content_async.assert_awaited_once_with( contents=vertex_ai_chat_completion._prepare_chat_history_for_request(chat_history), generation_config=settings.prepare_settings_dict(), diff --git 
a/python/tests/unit/connectors/memory/test_postgres.py b/python/tests/unit/connectors/memory/test_postgres.py new file mode 100644 index 000000000000..e84692ffab88 --- /dev/null +++ b/python/tests/unit/connectors/memory/test_postgres.py @@ -0,0 +1,301 @@ +# Copyright (c) Microsoft. All rights reserved. + +from collections.abc import AsyncGenerator +from dataclasses import dataclass +from typing import Annotated, Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +import pytest_asyncio +from psycopg import AsyncConnection, AsyncCursor +from psycopg_pool import AsyncConnectionPool +from pytest import fixture, mark + +from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( + OpenAIEmbeddingPromptExecutionSettings, +) +from semantic_kernel.connectors.memory.postgres.postgres_settings import PostgresSettings +from semantic_kernel.connectors.memory.postgres.postgres_store import PostgresStore +from semantic_kernel.data.const import DistanceFunction, IndexKind +from semantic_kernel.data.vector_store_model_decorator import vectorstoremodel +from semantic_kernel.data.vector_store_record_fields import ( + VectorStoreRecordDataField, + VectorStoreRecordKeyField, + VectorStoreRecordVectorField, +) + + +@fixture(scope="function") +def mock_cursor(): + return AsyncMock(spec=AsyncCursor) + + +@fixture(autouse=True) +def mock_connection_pool(mock_cursor: Mock): + with ( + patch( + f"{AsyncConnectionPool.__module__}.{AsyncConnectionPool.__qualname__}.connection", + ) as mock_pool_connection, + patch( + f"{AsyncConnectionPool.__module__}.{AsyncConnectionPool.__qualname__}.open", + new_callable=AsyncMock, + ) as mock_pool_open, + ): + mock_conn = AsyncMock(spec=AsyncConnection) + + mock_pool_connection.return_value.__aenter__.return_value = mock_conn + mock_conn.cursor.return_value.__aenter__.return_value = mock_cursor + + mock_pool_open.return_value = None + + yield mock_pool_connection, mock_pool_open + + 
+@pytest_asyncio.fixture +async def vector_store(postgres_unit_test_env) -> AsyncGenerator[PostgresStore, None]: + async with await PostgresSettings.create(env_file_path="test.env").create_connection_pool() as pool: + yield PostgresStore(connection_pool=pool) + + +@vectorstoremodel +@dataclass +class SimpleDataModel: + id: Annotated[int, VectorStoreRecordKeyField()] + embedding: Annotated[ + list[float], + VectorStoreRecordVectorField( + embedding_settings={"embedding": OpenAIEmbeddingPromptExecutionSettings(dimensions=1536)}, + index_kind=IndexKind.HNSW, + dimensions=1536, + distance_function=DistanceFunction.COSINE, + property_type="float", + ), + ] + data: Annotated[ + dict[str, Any], + VectorStoreRecordDataField(has_embedding=True, embedding_property_name="embedding", property_type="JSONB"), + ] + + +@mark.asyncio +async def test_vector_store_defaults(vector_store: PostgresStore) -> None: + assert vector_store.connection_pool is not None + async with vector_store.connection_pool.connection() as conn: + assert isinstance(conn, Mock) + + +def test_vector_store_with_connection_pool(vector_store: PostgresStore) -> None: + connection_pool = MagicMock(spec=AsyncConnectionPool) + vector_store = PostgresStore(connection_pool=connection_pool) + assert vector_store.connection_pool == connection_pool + + +@mark.asyncio +async def test_list_collection_names(vector_store: PostgresStore, mock_cursor: Mock) -> None: + mock_cursor.fetchall.return_value = [ + ("test_collection",), + ("test_collection_2",), + ] + names = await vector_store.list_collection_names() + assert names == ["test_collection", "test_collection_2"] + + +def test_get_collection(vector_store: PostgresStore) -> None: + collection = vector_store.get_collection("test_collection", SimpleDataModel) + assert collection.collection_name == "test_collection" + + +@mark.asyncio +async def test_does_collection_exist(vector_store: PostgresStore, mock_cursor: Mock) -> None: + mock_cursor.fetchall.return_value = 
[("test_collection",)] + collection = vector_store.get_collection("test_collection", SimpleDataModel) + result = await collection.does_collection_exist() + assert result is True + + +@mark.asyncio +async def test_delete_collection(vector_store: PostgresStore, mock_cursor: Mock) -> None: + collection = vector_store.get_collection("test_collection", SimpleDataModel) + await collection.delete_collection() + + assert mock_cursor.execute.call_count == 1 + execute_args, _ = mock_cursor.execute.call_args + statement = execute_args[0] + statement_str = statement.as_string() + + assert statement_str == 'DROP TABLE "public"."test_collection" CASCADE' + + +@mark.asyncio +async def test_delete_records(vector_store: PostgresStore, mock_cursor: Mock) -> None: + collection = vector_store.get_collection("test_collection", SimpleDataModel) + await collection.delete_batch([1, 2]) + + assert mock_cursor.execute.call_count == 1 + execute_args, _ = mock_cursor.execute.call_args + statement = execute_args[0] + statement_str = statement.as_string() + + assert statement_str == """DELETE FROM "public"."test_collection" WHERE "id" IN (1, 2)""" + + +@mark.asyncio +async def test_create_collection_simple_model(vector_store: PostgresStore, mock_cursor: Mock) -> None: + collection = vector_store.get_collection("test_collection", SimpleDataModel) + await collection.create_collection() + + # 2 calls, once for the table creation and once for the index creation + assert mock_cursor.execute.call_count == 2 + + # Check the table creation statement + execute_args, _ = mock_cursor.execute.call_args_list[0] + statement = execute_args[0] + statement_str = statement.as_string() + assert statement_str == ( + 'CREATE TABLE "public"."test_collection" ("id" INTEGER PRIMARY KEY, "embedding" VECTOR(1536), "data" JSONB)' + ) + + # Check the index creation statement + execute_args, _ = mock_cursor.execute.call_args_list[1] + statement = execute_args[0] + statement_str = statement.as_string() + assert 
statement_str == ( + 'CREATE INDEX "test_collection_embedding_idx" ON "public"."test_collection" ' + 'USING hnsw ("embedding" vector_cosine_ops)' + ) + + +@mark.asyncio +async def test_create_collection_model_with_python_types(vector_store: PostgresStore, mock_cursor: Mock) -> None: + @vectorstoremodel + @dataclass + class ModelWithImplicitTypes: + name: Annotated[str, VectorStoreRecordKeyField()] + age: Annotated[int, VectorStoreRecordDataField()] + data: Annotated[dict[str, Any], VectorStoreRecordDataField(embedding_property_name="embedding")] + embedding: Annotated[list[float], VectorStoreRecordVectorField(dimensions=20)] + scores: Annotated[list[float], VectorStoreRecordDataField()] + tags: Annotated[list[str], VectorStoreRecordDataField()] + + collection = vector_store.get_collection("test_collection", ModelWithImplicitTypes) + + await collection.create_collection() + + assert mock_cursor.execute.call_count == 1 + + execute_args, _ = mock_cursor.execute.call_args + + statement = execute_args[0] + statement_str = statement.as_string() + + assert statement_str == ( + 'CREATE TABLE "public"."test_collection" ' + '("name" TEXT PRIMARY KEY, "age" INTEGER, "data" JSONB, ' + '"embedding" VECTOR(20), "scores" DOUBLE PRECISION[], "tags" TEXT[])' + ) + + +@mark.asyncio +async def test_upsert_records(vector_store: PostgresStore, mock_cursor: Mock) -> None: + collection = vector_store.get_collection("test_collection", SimpleDataModel) + await collection.upsert_batch([ + SimpleDataModel(id=1, embedding=[1.0, 2.0, 3.0], data={"key": "value1"}), + SimpleDataModel(id=2, embedding=[4.0, 5.0, 6.0], data={"key": "value2"}), + SimpleDataModel(id=3, embedding=[5.0, 6.0, 1.0], data={"key": "value3"}), + ]) + + assert mock_cursor.executemany.call_count == 1 + execute_args, _ = mock_cursor.executemany.call_args + statement_str = execute_args[0].as_string() + values = execute_args[1] + assert len(values) == 3 + + assert statement_str == ( + 'INSERT INTO "public"."test_collection" 
("id", "embedding", "data") ' + "VALUES (%s, %s, %s) " + 'ON CONFLICT ("id") DO UPDATE SET "embedding" = EXCLUDED."embedding", "data" = EXCLUDED."data"' + ) + + assert values[0] == (1, [1.0, 2.0, 3.0], '{"key": "value1"}') + assert values[1] == (2, [4.0, 5.0, 6.0], '{"key": "value2"}') + assert values[2] == (3, [5.0, 6.0, 1.0], '{"key": "value3"}') + + +@mark.asyncio +async def test_get_records(vector_store: PostgresStore, mock_cursor: Mock) -> None: + mock_cursor.fetchall.return_value = [ + (1, "[1.0, 2.0, 3.0]", {"key": "value1"}), + (2, "[4.0, 5.0, 6.0]", {"key": "value2"}), + (3, "[5.0, 6.0, 1.0]", {"key": "value3"}), + ] + + collection = vector_store.get_collection("test_collection", SimpleDataModel) + records = await collection.get_batch([1, 2, 3]) + + assert len(records) == 3 + assert records[0].id == 1 + assert records[0].embedding == [1.0, 2.0, 3.0] + assert records[0].data == {"key": "value1"} + + assert records[1].id == 2 + assert records[1].embedding == [4.0, 5.0, 6.0] + assert records[1].data == {"key": "value2"} + + assert records[2].id == 3 + assert records[2].embedding == [5.0, 6.0, 1.0] + assert records[2].data == {"key": "value3"} + + +# Test settings + + +def test_settings_connection_string(monkeypatch) -> None: + monkeypatch.delenv("PGHOST", raising=False) + monkeypatch.delenv("PGPORT", raising=False) + monkeypatch.delenv("PGDATABASE", raising=False) + monkeypatch.delenv("PGUSER", raising=False) + monkeypatch.delenv("PGPASSWORD", raising=False) + + settings = PostgresSettings(connection_string="host=localhost port=5432 dbname=dbname user=user password=password") + conn_info = settings.get_connection_args() + + assert conn_info["host"] == "localhost" + assert conn_info["port"] == 5432 + assert conn_info["dbname"] == "dbname" + assert conn_info["user"] == "user" + assert conn_info["password"] == "password" + + +def test_settings_env_connection_string(monkeypatch) -> None: + monkeypatch.delenv("PGHOST", raising=False) + monkeypatch.delenv("PGPORT", 
raising=False) + monkeypatch.delenv("PGDATABASE", raising=False) + monkeypatch.delenv("PGUSER", raising=False) + monkeypatch.delenv("PGPASSWORD", raising=False) + + monkeypatch.setenv( + "POSTGRES_CONNECTION_STRING", "host=localhost port=5432 dbname=dbname user=user password=password" + ) + + settings = PostgresSettings() + conn_info = settings.get_connection_args() + assert conn_info["host"] == "localhost" + assert conn_info["port"] == 5432 + assert conn_info["dbname"] == "dbname" + assert conn_info["user"] == "user" + assert conn_info["password"] == "password" + + +def test_settings_env_vars(monkeypatch) -> None: + monkeypatch.setenv("PGHOST", "localhost") + monkeypatch.setenv("PGPORT", "5432") + monkeypatch.setenv("PGDATABASE", "dbname") + monkeypatch.setenv("PGUSER", "user") + monkeypatch.setenv("PGPASSWORD", "password") + + settings = PostgresSettings() + conn_info = settings.get_connection_args() + assert conn_info["host"] == "localhost" + assert conn_info["port"] == 5432 + assert conn_info["dbname"] == "dbname" + assert conn_info["user"] == "user" + assert conn_info["password"] == "password" diff --git a/python/tests/unit/connectors/mistral_ai/services/test_mistralai_chat_completion.py b/python/tests/unit/connectors/mistral_ai/services/test_mistralai_chat_completion.py index 5a4e18521c5d..f8e12a91a4ff 100644 --- a/python/tests/unit/connectors/mistral_ai/services/test_mistralai_chat_completion.py +++ b/python/tests/unit/connectors/mistral_ai/services/test_mistralai_chat_completion.py @@ -1,10 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. 
-from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch import pytest from mistralai.async_client import MistralAsyncClient from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior from semantic_kernel.connectors.ai.mistral_ai.prompt_execution_settings.mistral_ai_prompt_execution_settings import ( MistralAIChatPromptExecutionSettings, ) @@ -12,9 +13,21 @@ from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, ) -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.exceptions import ServiceInitializationError, ServiceResponseException +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ( + ChatMessageContent, + FunctionCallContent, + TextContent, +) +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.exceptions import ( + ServiceInitializationError, + ServiceInvalidExecutionSettingsError, + ServiceResponseException, +) from semantic_kernel.functions.kernel_arguments import KernelArguments +from semantic_kernel.functions.kernel_function_decorator import kernel_function from semantic_kernel.kernel import Kernel @@ -68,6 +81,83 @@ async def test_complete_chat_contents( assert content is not None +mock_message_text_content = ChatMessageContent(role=AuthorRole.ASSISTANT, items=[TextContent(text="test")]) + +mock_message_function_call = ChatMessageContent( + role=AuthorRole.ASSISTANT, + items=[ + FunctionCallContent( + name="test", + arguments={"key": "test"}, + ) + ], +) + + +@pytest.mark.parametrize( + "function_choice_behavior,model_responses,expected_result", 
+ [ + pytest.param( + FunctionChoiceBehavior.Auto(), + [[mock_message_function_call], [mock_message_text_content]], + TextContent, + id="auto", + ), + pytest.param( + FunctionChoiceBehavior.Auto(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="auto_none_invoke", + ), + pytest.param( + FunctionChoiceBehavior.Required(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="required_none_invoke", + ), + pytest.param(FunctionChoiceBehavior.NoneInvoke(), [[mock_message_text_content]], TextContent, id="none"), + ], +) +@pytest.mark.asyncio +async def test_complete_chat_contents_function_call_behavior_tool_call( + kernel: Kernel, + mock_settings: MistralAIChatPromptExecutionSettings, + function_choice_behavior: FunctionChoiceBehavior, + model_responses, + expected_result, +): + kernel.add_function("test", kernel_function(lambda key: "test", name="test")) + mock_settings.function_choice_behavior = function_choice_behavior + + arguments = KernelArguments() + chat_completion_base = MistralAIChatCompletion(ai_model_id="test_model_id", service_id="test", api_key="") + + with ( + patch.object(chat_completion_base, "_inner_get_chat_message_contents", side_effect=model_responses), + ): + response: list[ChatMessageContent] = await chat_completion_base.get_chat_message_contents( + chat_history=ChatHistory(system_message="Test"), settings=mock_settings, kernel=kernel, arguments=arguments + ) + + assert all(isinstance(content, expected_result) for content in response[0].items) + + +@pytest.mark.asyncio +async def test_complete_chat_contents_function_call_behavior_without_kernel( + mock_settings: MistralAIChatPromptExecutionSettings, + mock_mistral_ai_client_completion: MistralAsyncClient, +): + chat_history = MagicMock() + chat_completion_base = MistralAIChatCompletion( + ai_model_id="test_model_id", service_id="test", api_key="", async_client=mock_mistral_ai_client_completion + ) + + mock_settings.function_choice_behavior 
= FunctionChoiceBehavior.Auto() + + with pytest.raises(ServiceInvalidExecutionSettingsError): + await chat_completion_base.get_chat_message_contents(chat_history=chat_history, settings=mock_settings) + + @pytest.mark.asyncio async def test_complete_chat_stream_contents( kernel: Kernel, @@ -90,6 +180,70 @@ async def test_complete_chat_stream_contents( assert content is not None +mock_message_function_call = StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, items=[FunctionCallContent(name="test")], choice_index="0" +) + +mock_message_text_content = StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, items=[TextContent(text="test")], choice_index="0" +) + + +@pytest.mark.parametrize( + "function_choice_behavior,model_responses,expected_result", + [ + pytest.param( + FunctionChoiceBehavior.Auto(), + [[mock_message_function_call], [mock_message_text_content]], + TextContent, + id="auto", + ), + pytest.param( + FunctionChoiceBehavior.Auto(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="auto_none_invoke", + ), + pytest.param( + FunctionChoiceBehavior.Required(auto_invoke=False), + [[mock_message_function_call]], + FunctionCallContent, + id="required_none_invoke", + ), + pytest.param(FunctionChoiceBehavior.NoneInvoke(), [[mock_message_text_content]], TextContent, id="none"), + ], +) +@pytest.mark.asyncio +async def test_complete_chat_contents_streaming_function_call_behavior_tool_call( + kernel: Kernel, + mock_settings: MistralAIChatPromptExecutionSettings, + function_choice_behavior: FunctionChoiceBehavior, + model_responses, + expected_result, +): + mock_settings.function_choice_behavior = function_choice_behavior + + # Mock sequence of model responses + generator_mocks = [] + for mock_message in model_responses: + generator_mock = MagicMock() + generator_mock.__aiter__.return_value = [mock_message] + generator_mocks.append(generator_mock) + + arguments = KernelArguments() + chat_completion_base = 
MistralAIChatCompletion(ai_model_id="test_model_id", service_id="test", api_key="") + + with patch.object(chat_completion_base, "_inner_get_streaming_chat_message_contents", side_effect=generator_mocks): + messages = [] + async for chunk in chat_completion_base.get_streaming_chat_message_contents( + chat_history=ChatHistory(system_message="Test"), settings=mock_settings, kernel=kernel, arguments=arguments + ): + messages.append(chunk) + + response = messages[-1] + assert all(isinstance(content, expected_result) for content in response[0].items) + + @pytest.mark.asyncio async def test_mistral_ai_sdk_exception(kernel: Kernel, mock_settings: MistralAIChatPromptExecutionSettings): chat_history = MagicMock() @@ -152,26 +306,20 @@ def test_mistral_ai_chat_completion_init_constructor(mistralai_unit_test_env) -> def test_mistral_ai_chat_completion_init_constructor_missing_model(mistralai_unit_test_env) -> None: # Test successful initialization with pytest.raises(ServiceInitializationError): - MistralAIChatCompletion( - api_key="overwrite_api_key", - env_file_path="test.env" - ) + MistralAIChatCompletion(api_key="overwrite_api_key", env_file_path="test.env") @pytest.mark.parametrize("exclude_list", [["MISTRALAI_API_KEY", "MISTRALAI_CHAT_MODEL_ID"]], indirect=True) def test_mistral_ai_chat_completion_init_constructor_missing_api_key(mistralai_unit_test_env) -> None: # Test successful initialization with pytest.raises(ServiceInitializationError): - MistralAIChatCompletion( - ai_model_id="overwrite_model_id", - env_file_path="test.env" - ) + MistralAIChatCompletion(ai_model_id="overwrite_model_id", env_file_path="test.env") def test_mistral_ai_chat_completion_init_hybrid(mistralai_unit_test_env) -> None: mistral_ai_chat_completion = MistralAIChatCompletion( - ai_model_id="overwrite_model_id", - env_file_path="test.env", + ai_model_id="overwrite_model_id", + env_file_path="test.env", ) assert mistral_ai_chat_completion.ai_model_id == "overwrite_model_id" assert 
mistral_ai_chat_completion.async_client._api_key == "test_api_key" diff --git a/python/tests/unit/connectors/mistral_ai/services/test_mistralai_text_embeddings.py b/python/tests/unit/connectors/mistral_ai/services/test_mistralai_text_embeddings.py index 98550ca6f1ad..61c960b4810f 100644 --- a/python/tests/unit/connectors/mistral_ai/services/test_mistralai_text_embeddings.py +++ b/python/tests/unit/connectors/mistral_ai/services/test_mistralai_text_embeddings.py @@ -13,7 +13,7 @@ def test_embedding_with_env_variables(mistralai_unit_test_env): text_embedding = MistralAITextEmbedding() assert text_embedding.ai_model_id == "test_embedding_model_id" - assert text_embedding.client._api_key == "test_api_key" + assert text_embedding.async_client._api_key == "test_api_key" @pytest.mark.parametrize("exclude_list", [["MISTRALAI_API_KEY", "MISTRALAI_EMBEDDING_MODEL_ID"]], indirect=True) @@ -23,33 +23,33 @@ def test_embedding_with_constructor(mistralai_unit_test_env): ai_model_id="overwrite-model", ) assert text_embedding.ai_model_id == "overwrite-model" - assert text_embedding.client._api_key == "overwrite-api-key" + assert text_embedding.async_client._api_key == "overwrite-api-key" def test_embedding_with_client(mistralai_unit_test_env): client = MagicMock(spec=MistralAsyncClient) - text_embedding = MistralAITextEmbedding(client=client) - assert text_embedding.client == client + text_embedding = MistralAITextEmbedding(async_client=client) + assert text_embedding.async_client == client assert text_embedding.ai_model_id == "test_embedding_model_id" def test_embedding_with_api_key(mistralai_unit_test_env): text_embedding = MistralAITextEmbedding(api_key="overwrite-api-key") - assert text_embedding.client._api_key == "overwrite-api-key" + assert text_embedding.async_client._api_key == "overwrite-api-key" assert text_embedding.ai_model_id == "test_embedding_model_id" def test_embedding_with_model(mistralai_unit_test_env): text_embedding = 
MistralAITextEmbedding(ai_model_id="overwrite-model") assert text_embedding.ai_model_id == "overwrite-model" - assert text_embedding.client._api_key == "test_api_key" + assert text_embedding.async_client._api_key == "test_api_key" -@pytest.mark.parametrize("exclude_list", [["MISTRALAI_EMBEDDING_MODEL_ID"]], indirect=True) +@pytest.mark.parametrize("exclude_list", [["MISTRALAI_EMBEDDING_MODEL_ID"]], indirect=True) def test_embedding_with_model_without_env(mistralai_unit_test_env): text_embedding = MistralAITextEmbedding(ai_model_id="overwrite-model") assert text_embedding.ai_model_id == "overwrite-model" - assert text_embedding.client._api_key == "test_api_key" + assert text_embedding.async_client._api_key == "test_api_key" @pytest.mark.parametrize("exclude_list", [["MISTRALAI_EMBEDDING_MODEL_ID"]], indirect=True) @@ -90,7 +90,7 @@ async def test_embedding_generate_raw_embedding(mistralai_unit_test_env): mock_client = AsyncMock(spec=MistralAsyncClient) mock_embedding_response = MagicMock(spec=EmbeddingResponse, data=[MagicMock(embedding=[1, 2, 3, 4, 5])]) mock_client.embeddings.return_value = mock_embedding_response - text_embedding = MistralAITextEmbedding(client=mock_client) + text_embedding = MistralAITextEmbedding(async_client=mock_client) embedding = await text_embedding.generate_raw_embeddings(["test"]) assert embedding == [[1, 2, 3, 4, 5]] @@ -100,7 +100,7 @@ async def test_embedding_generate_embedding(mistralai_unit_test_env): mock_client = AsyncMock(spec=MistralAsyncClient) mock_embedding_response = MagicMock(spec=EmbeddingResponse, data=[MagicMock(embedding=[1, 2, 3, 4, 5])]) mock_client.embeddings.return_value = mock_embedding_response - text_embedding = MistralAITextEmbedding(client=mock_client) + text_embedding = MistralAITextEmbedding(async_client=mock_client) embedding = await text_embedding.generate_embeddings(["test"]) assert embedding.tolist() == [[1, 2, 3, 4, 5]] @@ -109,6 +109,6 @@ async def 
test_embedding_generate_embedding(mistralai_unit_test_env): async def test_embedding_generate_embedding_exception(mistralai_unit_test_env): mock_client = AsyncMock(spec=MistralAsyncClient) mock_client.embeddings.side_effect = Exception("Test Exception") - text_embedding = MistralAITextEmbedding(client=mock_client) + text_embedding = MistralAITextEmbedding(async_client=mock_client) with pytest.raises(ServiceResponseException): await text_embedding.generate_embeddings(["test"]) diff --git a/python/tests/unit/connectors/mistral_ai/test_mistralai_request_settings.py b/python/tests/unit/connectors/mistral_ai/test_mistralai_request_settings.py index 636f1565b095..65ea5f1d39b7 100644 --- a/python/tests/unit/connectors/mistral_ai/test_mistralai_request_settings.py +++ b/python/tests/unit/connectors/mistral_ai/test_mistralai_request_settings.py @@ -1,5 +1,4 @@ # Copyright (c) Microsoft. All rights reserved. -import pytest from semantic_kernel.connectors.ai.mistral_ai.prompt_execution_settings.mistral_ai_prompt_execution_settings import ( MistralAIChatPromptExecutionSettings, @@ -112,15 +111,16 @@ def test_create_options(): def test_create_options_with_function_choice_behavior(): - with pytest.raises(NotImplementedError): - MistralAIChatPromptExecutionSettings( - service_id="test_service", - function_choice_behavior="auto", - extension_data={ - "temperature": 0.5, - "top_p": 0.5, - "max_tokens": 128, - "tools": [{}], - "messages": [{"role": "system", "content": "Hello"}], - }, - ) + settings = MistralAIChatPromptExecutionSettings( + service_id="test_service", + function_choice_behavior="auto", + extension_data={ + "temperature": 0.5, + "top_p": 0.5, + "max_tokens": 128, + "tools": [{}], + "messages": [{"role": "system", "content": "Hello"}], + }, + ) + assert settings.function_choice_behavior + diff --git a/python/tests/unit/connectors/ollama/services/test_ollama_chat_completion.py b/python/tests/unit/connectors/ollama/services/test_ollama_chat_completion.py index 
05f850ff7e8b..a6daf1ede76c 100644 --- a/python/tests/unit/connectors/ollama/services/test_ollama_chat_completion.py +++ b/python/tests/unit/connectors/ollama/services/test_ollama_chat_completion.py @@ -4,10 +4,7 @@ import pytest -from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import ( - OllamaChatPromptExecutionSettings, - OllamaTextPromptExecutionSettings, -) +from semantic_kernel.connectors.ai.ollama.ollama_prompt_execution_settings import OllamaChatPromptExecutionSettings from semantic_kernel.connectors.ai.ollama.services.ollama_chat_completion import OllamaChatCompletion from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError, ServiceInvalidResponseError @@ -25,6 +22,12 @@ def test_init_empty_service_id(model_id): assert ollama.service_id == model_id +def test_init_empty_string_ai_model_id(): + """Test that the service initializes with a error if there is no ai_model_id.""" + with pytest.raises(ServiceInitializationError): + _ = OllamaChatCompletion(ai_model_id="") + + def test_custom_client(model_id, custom_client): """Test that the service initializes correctly with a custom client.""" ollama = OllamaChatCompletion(ai_model_id=model_id, client=custom_client) @@ -32,7 +35,7 @@ def test_custom_client(model_id, custom_client): @pytest.mark.parametrize("exclude_list", [["OLLAMA_MODEL"]], indirect=True) -def test_init_empty_model_id(ollama_unit_test_env): +def test_init_empty_model_id_in_env(ollama_unit_test_env): """Test that the service initializes incorrectly with an empty model id.""" with pytest.raises(ServiceInitializationError): _ = OllamaChatCompletion(env_file_path="fake_env_file_path.env") @@ -61,20 +64,13 @@ async def test_custom_host( OllamaChatPromptExecutionSettings(service_id=service_id, options=default_options), ) - text_responses = await ollama.get_text_contents( - prompt, - OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ) - # Check that the client was 
initialized once with the correct host assert mock_client.call_count == 1 mock_client.assert_called_with(host=host) - # Check that the chat client was called twice and the responses are correct - assert mock_chat_client.call_count == 2 + # Check that the chat client was called once and the responses are correct + assert mock_chat_client.call_count == 1 assert len(chat_responses) == 1 assert chat_responses[0].content == "test_response" - assert len(text_responses) == 1 - assert text_responses[0].text == "test_response" @pytest.mark.asyncio @@ -104,18 +100,11 @@ async def test_custom_host_streaming( assert messages[0].role == "assistant" assert messages[0].content == "test_response" - async for messages in ollama.get_streaming_text_contents( - prompt, - OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ): - assert len(messages) == 1 - assert messages[0].text == "test_response" - # Check that the client was initialized once with the correct host assert mock_client.call_count == 1 mock_client.assert_called_with(host=host) - # Check that the chat client was called twice and the responses are correct - assert mock_chat_client.call_count == 2 + # Check that the chat client was called once + assert mock_chat_client.call_count == 1 @pytest.mark.asyncio @@ -161,48 +150,6 @@ async def test_chat_completion_wrong_return_type( ) -@pytest.mark.asyncio -@patch("ollama.AsyncClient.chat") -async def test_text_completion(mock_chat_client, model_id, service_id, prompt, default_options): - """Test that the text completion service completes correctly.""" - mock_chat_client.return_value = {"message": {"content": "test_response"}} - ollama = OllamaChatCompletion(ai_model_id=model_id) - response = await ollama.get_text_contents( - prompt=prompt, - settings=OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ) - - assert len(response) == 1 - assert response[0].text == "test_response" - 
mock_chat_client.assert_called_once_with( - model=model_id, - messages=[{"role": "user", "content": prompt}], - options=default_options, - stream=False, - ) - - -@pytest.mark.asyncio -@patch("ollama.AsyncClient.chat") -async def test_text_completion_wrong_return_type( - mock_chat_client, - mock_streaming_chat_response, - model_id, - service_id, - chat_history, - default_options, -): - """Test that the text completion service fails when the return type is incorrect.""" - mock_chat_client.return_value = mock_streaming_chat_response # should not be a streaming response - - ollama = OllamaChatCompletion(ai_model_id=model_id) - with pytest.raises(ServiceInvalidResponseError): - await ollama.get_text_contents( - chat_history, - OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ) - - @pytest.mark.asyncio @patch("ollama.AsyncClient.chat") async def test_streaming_chat_completion( @@ -256,58 +203,3 @@ async def test_streaming_chat_completion_wrong_return_type( OllamaChatPromptExecutionSettings(service_id=service_id, options=default_options), ): pass - - -@pytest.mark.asyncio -@patch("ollama.AsyncClient.chat") -async def test_streaming_text_completion( - mock_chat_client, - mock_streaming_chat_response, - model_id, - service_id, - prompt, - default_options, -): - """Test that the streaming text completion service completes correctly.""" - mock_chat_client.return_value = mock_streaming_chat_response - - ollama = OllamaChatCompletion(ai_model_id=model_id) - response = ollama.get_streaming_text_contents( - prompt, - OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ) - - responses = [] - async for line in response: - if line: - assert line[0].text == "test_response" - responses.append(line[0].text) - assert len(responses) == 1 - - mock_chat_client.assert_called_once_with( - model=model_id, - messages=[{"role": "user", "content": prompt}], - options=default_options, - stream=True, - ) - - -@pytest.mark.asyncio 
-@patch("ollama.AsyncClient.chat") -async def test_streaming_text_completion_wrong_return_type( - mock_chat_client, - model_id, - service_id, - chat_history, - default_options, -): - """Test that the text completion streaming service fails when the return type is incorrect.""" - mock_chat_client.return_value = {"message": {"content": "test_response"}} # should not be a non-streaming response - - ollama = OllamaChatCompletion(ai_model_id=model_id) - with pytest.raises(ServiceInvalidResponseError): - async for _ in ollama.get_streaming_text_contents( - chat_history, - OllamaTextPromptExecutionSettings(service_id=service_id, options=default_options), - ): - pass diff --git a/python/tests/unit/connectors/open_ai/services/test_azure_chat_completion.py b/python/tests/unit/connectors/open_ai/services/test_azure_chat_completion.py index cfe96401ae9a..b4891c68f5cc 100644 --- a/python/tests/unit/connectors/open_ai/services/test_azure_chat_completion.py +++ b/python/tests/unit/connectors/open_ai/services/test_azure_chat_completion.py @@ -92,14 +92,6 @@ def test_init_with_empty_deployment_name(azure_openai_unit_test_env) -> None: ) -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) -def test_init_with_empty_api_key(azure_openai_unit_test_env) -> None: - with pytest.raises(ServiceInitializationError): - AzureChatCompletion( - env_file_path="test.env", - ) - - @pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_BASE_URL"]], indirect=True) def test_init_with_empty_endpoint_and_base_url(azure_openai_unit_test_env) -> None: with pytest.raises(ServiceInitializationError): @@ -341,18 +333,16 @@ async def test_azure_on_your_data_string( message=ChatCompletionMessage( content="test", role="assistant", - context=json.dumps( - { - "citations": { - "content": "test content", - "title": "test title", - "url": "test url", - "filepath": "test filepath", - "chunk_id": "test chunk_id", - }, - "intent": "query used", - } - ), + 
context=json.dumps({ + "citations": { + "content": "test content", + "title": "test title", + "url": "test url", + "filepath": "test filepath", + "chunk_id": "test chunk_id", + }, + "intent": "query used", + }), ), finish_reason="stop", ) @@ -742,7 +732,7 @@ async def test_no_kernel_provided_throws_error( with pytest.raises( ServiceInvalidExecutionSettingsError, - match="The kernel is required for OpenAI tool calls.", + match="The kernel is required for function calls.", ): await azure_chat_completion.get_chat_message_contents( chat_history=chat_history, settings=complete_prompt_execution_settings @@ -769,7 +759,7 @@ async def test_auto_invoke_false_no_kernel_provided_throws_error( with pytest.raises( ServiceInvalidExecutionSettingsError, - match="The kernel is required for OpenAI tool calls.", + match="The kernel is required for function calls.", ): await azure_chat_completion.get_chat_message_contents( chat_history=chat_history, settings=complete_prompt_execution_settings diff --git a/python/tests/unit/connectors/open_ai/services/test_azure_text_completion.py b/python/tests/unit/connectors/open_ai/services/test_azure_text_completion.py index f4fb67fe3ed4..760e63293016 100644 --- a/python/tests/unit/connectors/open_ai/services/test_azure_text_completion.py +++ b/python/tests/unit/connectors/open_ai/services/test_azure_text_completion.py @@ -11,6 +11,7 @@ OpenAITextPromptExecutionSettings, ) from semantic_kernel.connectors.ai.open_ai.services.azure_text_completion import AzureTextCompletion +from semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase from semantic_kernel.contents.text_content import TextContent from semantic_kernel.exceptions import ServiceInitializationError @@ -54,6 +55,20 @@ def test_init_with_custom_header(azure_openai_unit_test_env) -> None: assert azure_text_completion.client.default_headers[key] == value 
+def test_azure_text_embedding_generates_no_token_with_api_key_in_env(azure_openai_unit_test_env) -> None: + with ( + patch( + f"{AzureOpenAISettings.__module__}.{AzureOpenAISettings.__qualname__}.get_azure_openai_auth_token", + ) as mock_get_token, + ): + mock_get_token.return_value = "test_token" + azure_text_completion = AzureTextCompletion() + + assert azure_text_completion.client is not None + # API key is provided in env var, so the ad_token should be None + assert mock_get_token.call_count == 0 + + @pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_TEXT_DEPLOYMENT_NAME"]], indirect=True) def test_init_with_empty_deployment_name(monkeypatch, azure_openai_unit_test_env) -> None: monkeypatch.delenv("AZURE_OPENAI_TEXT_DEPLOYMENT_NAME", raising=False) @@ -63,14 +78,6 @@ def test_init_with_empty_deployment_name(monkeypatch, azure_openai_unit_test_env ) -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) -def test_init_with_empty_api_key(azure_openai_unit_test_env) -> None: - with pytest.raises(ServiceInitializationError): - AzureTextCompletion( - env_file_path="test.env", - ) - - @pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_BASE_URL"]], indirect=True) def test_init_with_empty_endpoint_and_base_url(azure_openai_unit_test_env) -> None: with pytest.raises(ServiceInitializationError): diff --git a/python/tests/unit/connectors/open_ai/services/test_azure_text_embedding.py b/python/tests/unit/connectors/open_ai/services/test_azure_text_embedding.py index bdb97b1b0070..169405facbb7 100644 --- a/python/tests/unit/connectors/open_ai/services/test_azure_text_embedding.py +++ b/python/tests/unit/connectors/open_ai/services/test_azure_text_embedding.py @@ -8,6 +8,7 @@ from semantic_kernel.connectors.ai.embeddings.embedding_generator_base import EmbeddingGeneratorBase from semantic_kernel.connectors.ai.open_ai.services.azure_text_embedding import AzureTextEmbedding +from 
semantic_kernel.connectors.ai.open_ai.settings.azure_open_ai_settings import AzureOpenAISettings from semantic_kernel.exceptions.service_exceptions import ServiceInitializationError @@ -29,14 +30,6 @@ def test_azure_text_embedding_init_with_empty_deployment_name(azure_openai_unit_ ) -@pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_API_KEY"]], indirect=True) -def test_azure_text_embedding_init_with_empty_api_key(azure_openai_unit_test_env) -> None: - with pytest.raises(ServiceInitializationError): - AzureTextEmbedding( - env_file_path="test.env", - ) - - @pytest.mark.parametrize("exclude_list", [["AZURE_OPENAI_ENDPOINT", "AZURE_OPENAI_BASE_URL"]], indirect=True) def test_azure_text_embedding_init_with_empty_endpoint_and_base_url(azure_openai_unit_test_env) -> None: with pytest.raises(ServiceInitializationError): @@ -82,6 +75,20 @@ def test_azure_text_embedding_init_with_from_dict(azure_openai_unit_test_env) -> assert azure_text_embedding.client.default_headers[key] == value +def test_azure_text_embedding_generates_no_token_with_api_key_in_env(azure_openai_unit_test_env) -> None: + with ( + patch( + f"{AzureOpenAISettings.__module__}.{AzureOpenAISettings.__qualname__}.get_azure_openai_auth_token", + ) as mock_get_token, + ): + mock_get_token.return_value = "test_token" + azure_text_embedding = AzureTextEmbedding() + + assert azure_text_embedding.client is not None + # API key is provided in env var, so the ad_token should be None + assert mock_get_token.call_count == 0 + + @pytest.mark.asyncio @patch.object(AsyncEmbeddings, "create", new_callable=AsyncMock) async def test_azure_text_embedding_calls_with_parameters(mock_create, azure_openai_unit_test_env) -> None: diff --git a/python/tests/unit/connectors/open_ai/services/test_open_ai_chat_completion_base.py b/python/tests/unit/connectors/open_ai/services/test_open_ai_chat_completion_base.py index 9be0b412d8b9..401349542f02 100644 --- 
a/python/tests/unit/connectors/open_ai/services/test_open_ai_chat_completion_base.py +++ b/python/tests/unit/connectors/open_ai/services/test_open_ai_chat_completion_base.py @@ -17,9 +17,7 @@ from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.open_ai_prompt_execution_settings import ( OpenAIChatPromptExecutionSettings, ) -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import ( - OpenAIChatCompletion, -) +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.contents import StreamingChatMessageContent from semantic_kernel.contents.chat_history import ChatHistory @@ -176,7 +174,7 @@ async def test_cmc_function_call_behavior( service_id="test_service_id", function_call_behavior=FunctionCallBehavior.AutoInvokeKernelFunctions() ) with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base.OpenAIChatCompletionBase._process_function_call", + "semantic_kernel.kernel.Kernel.invoke_function_call", new_callable=AsyncMock, ) as mock_process_function_call: openai_chat_completion = OpenAIChatCompletion() @@ -227,7 +225,7 @@ async def test_cmc_function_choice_behavior( service_id="test_service_id", function_choice_behavior=FunctionChoiceBehavior.Auto() ) with patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base.OpenAIChatCompletionBase._process_function_call", + "semantic_kernel.kernel.Kernel.invoke_function_call", new_callable=AsyncMock, ) as mock_process_function_call: openai_chat_completion = OpenAIChatCompletion() @@ -323,6 +321,32 @@ async def test_cmc_no_fcc_in_response( ) +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_cmc_structured_output_no_fcc( + mock_create, + kernel: Kernel, + chat_history: ChatHistory, + mock_chat_completion_response: 
ChatCompletion, + openai_unit_test_env, +): + mock_create.return_value = mock_chat_completion_response + chat_history.add_user_message("hello world") + complete_prompt_execution_settings = OpenAIChatPromptExecutionSettings(service_id="test_service_id") + + # Define a mock response format + class Test: + name: str + + complete_prompt_execution_settings.response_format = Test + + openai_chat_completion = OpenAIChatCompletion() + await openai_chat_completion.get_chat_message_contents( + chat_history=chat_history, settings=complete_prompt_execution_settings, kernel=kernel + ) + mock_create.assert_awaited_once() + + @pytest.mark.asyncio @patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) async def test_cmc_run_out_of_auto_invoke_loop( @@ -389,6 +413,7 @@ async def test_scmc_prompt_execution_settings( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(chat_history), ) @@ -456,6 +481,7 @@ async def test_scmc( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), ) @@ -500,10 +526,55 @@ async def test_scmc_singular( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), ) +@pytest.mark.asyncio +@patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) +async def test_scmc_structured_output_no_fcc( + mock_create, + kernel: Kernel, + chat_history: ChatHistory, + openai_unit_test_env, +): + content1 = ChatCompletionChunk( + id="test_id", + choices=[], + created=0, + model="test", + object="chat.completion.chunk", + ) + content2 = 
ChatCompletionChunk( + id="test_id", + choices=[ChunkChoice(index=0, delta=ChunkChoiceDelta(content="test", role="assistant"), finish_reason="stop")], + created=0, + model="test", + object="chat.completion.chunk", + ) + stream = MagicMock(spec=AsyncStream) + stream.__aiter__.return_value = [content1, content2] + mock_create.return_value = stream + chat_history.add_user_message("hello world") + complete_prompt_execution_settings = OpenAIChatPromptExecutionSettings(service_id="test_service_id") + + # Define a mock response format + class Test: + name: str + + complete_prompt_execution_settings.response_format = Test + openai_chat_completion = OpenAIChatCompletion() + async for msg in openai_chat_completion.get_streaming_chat_message_content( + chat_history=chat_history, + settings=complete_prompt_execution_settings, + kernel=kernel, + arguments=KernelArguments(), + ): + assert isinstance(msg, StreamingChatMessageContent) + mock_create.assert_awaited_once() + + @pytest.mark.asyncio @patch.object(AsyncChatCompletions, "create", new_callable=AsyncMock) async def test_scmc_function_call_behavior( @@ -535,6 +606,7 @@ async def test_scmc_function_call_behavior( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), ) @@ -570,6 +642,7 @@ async def test_scmc_function_choice_behavior( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), ) @@ -640,6 +713,7 @@ async def test_scmc_no_fcc_in_response( mock_create.assert_awaited_once_with( model=openai_unit_test_env["OPENAI_CHAT_MODEL_ID"], stream=True, + stream_options={"include_usage": True}, messages=openai_chat_completion._prepare_chat_history_for_request(orig_chat_history), ) 
diff --git a/python/tests/unit/connectors/open_ai/test_openai_request_settings.py b/python/tests/unit/connectors/open_ai/test_openai_request_settings.py index f920290c9a98..db39285f39a6 100644 --- a/python/tests/unit/connectors/open_ai/test_openai_request_settings.py +++ b/python/tests/unit/connectors/open_ai/test_openai_request_settings.py @@ -1,9 +1,11 @@ # Copyright (c) Microsoft. All rights reserved. import pytest +from pydantic import BaseModel from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import ( AzureAISearchDataSource, + AzureAISearchDataSourceParameters, AzureChatPromptExecutionSettings, ExtraBody, ) @@ -14,6 +16,20 @@ from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings from semantic_kernel.connectors.memory.azure_cognitive_search.azure_ai_search_settings import AzureAISearchSettings from semantic_kernel.exceptions import ServiceInvalidExecutionSettingsError +from semantic_kernel.kernel_pydantic import KernelBaseModel + + +############################################ +# Test classes for structured output +class TestClass: + attribute: str + + +class TestClassPydantic(KernelBaseModel): + attribute: str + + +############################################ def test_default_openai_chat_prompt_execution_settings(): @@ -199,7 +215,7 @@ def test_create_options_azure_data(): parameters={ "indexName": "test-index", "endpoint": "test-endpoint", - "authentication": {"type": "api_key", "api_key": "test-key"}, + "authentication": {"type": "api_key", "key": "test-key"}, } ) extra = ExtraBody(data_sources=[az_source]) @@ -279,8 +295,76 @@ def test_azure_open_ai_chat_prompt_execution_settings_with_aisearch_data_sources assert settings.extra_body["dataSources"][0]["type"] == "AzureCognitiveSearch" +@pytest.mark.parametrize( + "authentication", + [ + {"type": "APIKey", "key": "test_key"}, + {"type": "api_key", "key": "test_key"}, + pytest.param({"type": "api_key"}, 
marks=pytest.mark.xfail), + {"type": "SystemAssignedManagedIdentity"}, + {"type": "system_assigned_managed_identity"}, + {"type": "UserAssignedManagedIdentity", "managed_identity_resource_id": "test_id"}, + {"type": "user_assigned_managed_identity", "managed_identity_resource_id": "test_id"}, + pytest.param({"type": "user_assigned_managed_identity"}, marks=pytest.mark.xfail), + {"type": "AccessToken", "access_token": "test_token"}, + {"type": "access_token", "access_token": "test_token"}, + pytest.param({"type": "access_token"}, marks=pytest.mark.xfail), + pytest.param({"type": "invalid"}, marks=pytest.mark.xfail), + ], +) +def test_aisearch_data_source_parameters(authentication) -> None: + AzureAISearchDataSourceParameters(index_name="test_index", authentication=authentication) + + def test_azure_open_ai_chat_prompt_execution_settings_with_response_format_json(): response_format = {"type": "json_object"} settings = AzureChatPromptExecutionSettings(response_format=response_format) options = settings.prepare_settings_dict() assert options["response_format"] == response_format + + +def test_openai_chat_prompt_execution_settings_with_json_structured_output(): + settings = OpenAIChatPromptExecutionSettings() + settings.response_format = { + "type": "json_schema", + "json_schema": { + "name": "math_response", + "schema": { + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": {"explanation": {"type": "string"}, "output": {"type": "string"}}, + "required": ["explanation", "output"], + "additionalProperties": False, + }, + }, + "final_answer": {"type": "string"}, + }, + "required": ["steps", "final_answer"], + "additionalProperties": False, + }, + "strict": True, + }, + } + assert isinstance(settings.response_format, dict) + + +def test_openai_chat_prompt_execution_settings_with_nonpydantic_type_structured_output(): + settings = OpenAIChatPromptExecutionSettings() + settings.response_format = TestClass + 
assert isinstance(settings.response_format, type) + + +def test_openai_chat_prompt_execution_settings_with_pydantic_type_structured_output(): + settings = OpenAIChatPromptExecutionSettings() + settings.response_format = TestClassPydantic + assert issubclass(settings.response_format, BaseModel) + + +def test_openai_chat_prompt_execution_settings_with_invalid_structured_output(): + settings = OpenAIChatPromptExecutionSettings() + with pytest.raises(ServiceInvalidExecutionSettingsError): + settings.response_format = "invalid" diff --git a/python/tests/unit/connectors/openapi/openapi_todo.yaml b/python/tests/unit/connectors/openapi/openapi_todo.yaml new file mode 100644 index 000000000000..3afd713b809e --- /dev/null +++ b/python/tests/unit/connectors/openapi/openapi_todo.yaml @@ -0,0 +1,57 @@ +openapi: 3.0.0 +info: + title: Todo List API + version: 1.0.0 + description: API for managing todo lists +paths: + /list: + get: + summary: Get todo list + operationId: get_todo_list + description: get todo list from specific group + parameters: + - name: listName + in: query + required: true + description: todo list group name description + schema: + type: string + description: todo list group name + responses: + "200": + description: Successful response + content: + application/json: + schema: + type: array + items: + type: object + properties: + task: + type: string + listName: + type: string + + /add: + post: + summary: Add a task to a list + operationId: add_todo_list + description: add todo to specific group + requestBody: + required: true + content: + application/json: + schema: + type: object + required: + - task + properties: + task: + type: string + description: task name + listName: + type: string + description: task group name + responses: + "201": + description: Task added successfully diff --git a/python/tests/unit/connectors/openapi/test_openapi_parser.py b/python/tests/unit/connectors/openapi/test_openapi_parser.py index 71548537e30a..0e4d278a1667 100644 --- 
a/python/tests/unit/connectors/openapi/test_openapi_parser.py +++ b/python/tests/unit/connectors/openapi/test_openapi_parser.py @@ -1,10 +1,14 @@ # Copyright (c) Microsoft. All rights reserved. +import os import pytest -from semantic_kernel.connectors.openapi_plugin.openapi_manager import OpenApiParser +from semantic_kernel.connectors.openapi_plugin.openapi_manager import OpenApiParser, create_functions_from_openapi from semantic_kernel.exceptions.function_exceptions import PluginInitializationError +from semantic_kernel.functions import KernelFunctionFromMethod, KernelFunctionMetadata, KernelParameterMetadata + +current_dir = os.path.dirname(os.path.abspath(__file__)) def test_parse_parameters_missing_in_field(): @@ -14,6 +18,62 @@ def test_parse_parameters_missing_in_field(): parser._parse_parameters(parameters) +def test_parse_parameters_get_query(): + """Verify whether the get request query parameter can be successfully parsed""" + openapi_fcs: list[KernelFunctionFromMethod] = create_functions_from_openapi( + plugin_name="todo", + openapi_document_path=os.path.join(current_dir, "openapi_todo.yaml"), + execution_settings=None, + ) + + get_todo_list: list[KernelFunctionMetadata] = [ + f.metadata for f in openapi_fcs if f.metadata.name == "get_todo_list" + ] + + assert get_todo_list + + get_todo_params: list[KernelParameterMetadata] = get_todo_list[0].parameters + assert get_todo_params + assert get_todo_params[0].name == "listName" + assert get_todo_params[0].description == "todo list group name description" + assert get_todo_params[0].is_required + assert get_todo_params[0].schema_data + assert get_todo_params[0].schema_data.get("type") == "string" + assert get_todo_params[0].schema_data.get("description") == "todo list group name" + + +def test_parse_parameters_post_request_body(): + """Verify whether the post request body parameter can be successfully parsed""" + openapi_fcs: list[KernelFunctionFromMethod] = create_functions_from_openapi( + plugin_name="todo", 
+ openapi_document_path=os.path.join(current_dir, "openapi_todo.yaml"), + execution_settings=None, + ) + + add_todo_list: list[KernelFunctionMetadata] = [ + f.metadata for f in openapi_fcs if f.metadata.name == "add_todo_list" + ] + + assert add_todo_list + + add_todo_params: list[KernelParameterMetadata] = add_todo_list[0].parameters + + assert add_todo_params + assert add_todo_params[0].name == "task" + assert add_todo_params[0].description == "task name" + assert add_todo_params[0].is_required + assert add_todo_params[0].schema_data + assert add_todo_params[0].schema_data.get("type") == "string" + assert add_todo_params[0].schema_data.get("description") == "task name" + + assert add_todo_params[1].name == "listName" + assert add_todo_params[1].description == "task group name" + assert not add_todo_params[1].is_required + assert add_todo_params[1].schema_data + assert add_todo_params[1].schema_data.get("type") == "string" + assert add_todo_params[1].schema_data.get("description") == "task group name" + + def test_get_payload_properties_schema_none(): parser = OpenApiParser() properties = parser._get_payload_properties("operation_id", None, []) diff --git a/python/tests/unit/contents/test_chat_message_content.py b/python/tests/unit/contents/test_chat_message_content.py index 10997b9a0d98..9e7dcaa07b8a 100644 --- a/python/tests/unit/contents/test_chat_message_content.py +++ b/python/tests/unit/contents/test_chat_message_content.py @@ -284,8 +284,98 @@ def test_cmc_to_dict_keys(): "content": [{"type": "text", "text": "Hello, "}, {"type": "text", "text": "world!"}], }, ), + ( + { + "role": "user", + "items": [ + {"content_type": "text", "text": "Hello, "}, + {"content_type": "text", "text": "world!"}, + ], + }, + { + "role": "user", + "content": [{"type": "text", "text": "Hello, "}, {"type": "text", "text": "world!"}], + }, + ), + ( + { + "role": "user", + "items": [ + {"content_type": "annotation", "file_id": "test"}, + ], + }, + { + "role": "user", + "content": 
[{"type": "text", "text": "test None (Start Index=None->End Index=None)"}], + }, + ), + ( + { + "role": "user", + "items": [ + {"content_type": "file_reference", "file_id": "test"}, + ], + }, + { + "role": "user", + "content": [{"file_id": "test"}], + }, + ), + ( + { + "role": "user", + "items": [ + {"content_type": "function_call", "name": "test-test"}, + ], + }, + { + "role": "user", + "content": [{"id": None, "type": "function", "function": {"name": "test-test", "arguments": None}}], + }, + ), + ( + { + "role": "user", + "items": [ + {"content_type": "function_call", "name": "test-test"}, + {"content_type": "function_result", "name": "test-test", "result": "test", "id": "test"}, + ], + }, + { + "role": "user", + "content": [ + {"id": None, "type": "function", "function": {"name": "test-test", "arguments": None}}, + {"tool_call_id": "test", "content": "test"}, + ], + }, + ), + ( + { + "role": "user", + "items": [ + {"content_type": "image", "uri": "http://test"}, + ], + }, + { + "role": "user", + "content": [{"image_url": {"url": "http://test/"}, "type": "image_url"}], + }, + ), + ], + ids=[ + "user_content", + "user_with_name", + "user_item", + "function_call", + "function_result", + "multiple_items", + "multiple_items_serialize", + "annotations_serialize", + "file_reference_serialize", + "function_call_serialize", + "function_result_serialize", + "image_serialize", ], - ids=["user_content", "user_with_name", "user_item", "function_call", "function_result", "multiple_items"], ) def test_cmc_to_dict_items(input_args, expected_dict): message = ChatMessageContent(**input_args) diff --git a/python/tests/unit/contents/test_streaming_annotation_content.py b/python/tests/unit/contents/test_streaming_annotation_content.py new file mode 100644 index 000000000000..08bab455103e --- /dev/null +++ b/python/tests/unit/contents/test_streaming_annotation_content.py @@ -0,0 +1,124 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from xml.etree.ElementTree import Element + +import pytest + +from semantic_kernel.contents.streaming_annotation_content import StreamingAnnotationContent + +test_cases = [ + pytest.param(StreamingAnnotationContent(file_id="12345"), id="file_id"), + pytest.param(StreamingAnnotationContent(quote="This is a quote."), id="quote"), + pytest.param(StreamingAnnotationContent(start_index=5, end_index=20), id="indices"), + pytest.param( + StreamingAnnotationContent(file_id="12345", quote="This is a quote.", start_index=5, end_index=20), + id="all_fields", + ), +] + + +def test_create_empty(): + annotation = StreamingAnnotationContent() + assert annotation.file_id is None + assert annotation.quote is None + assert annotation.start_index is None + assert annotation.end_index is None + + +def test_create_file_id(): + annotation = StreamingAnnotationContent(file_id="12345") + assert annotation.file_id == "12345" + + +def test_create_quote(): + annotation = StreamingAnnotationContent(quote="This is a quote.") + assert annotation.quote == "This is a quote." + + +def test_create_indices(): + annotation = StreamingAnnotationContent(start_index=5, end_index=20) + assert annotation.start_index == 5 + assert annotation.end_index == 20 + + +def test_create_all_fields(): + annotation = StreamingAnnotationContent(file_id="12345", quote="This is a quote.", start_index=5, end_index=20) + assert annotation.file_id == "12345" + assert annotation.quote == "This is a quote." + assert annotation.start_index == 5 + assert annotation.end_index == 20 + + +def test_update_file_id(): + annotation = StreamingAnnotationContent() + annotation.file_id = "12345" + assert annotation.file_id == "12345" + + +def test_update_quote(): + annotation = StreamingAnnotationContent() + annotation.quote = "This is a quote." + assert annotation.quote == "This is a quote." 
+ + +def test_update_indices(): + annotation = StreamingAnnotationContent() + annotation.start_index = 5 + annotation.end_index = 20 + assert annotation.start_index == 5 + assert annotation.end_index == 20 + + +def test_to_str(): + annotation = StreamingAnnotationContent(file_id="12345", quote="This is a quote.", start_index=5, end_index=20) + assert ( + str(annotation) + == "StreamingAnnotationContent(file_id=12345, quote=This is a quote., start_index=5, end_index=20)" + ) + + +def test_to_element(): + annotation = StreamingAnnotationContent(file_id="12345", quote="This is a quote.", start_index=5, end_index=20) + element = annotation.to_element() + assert element.tag == "streaming_annotation" + assert element.get("file_id") == "12345" + assert element.get("quote") == "This is a quote." + assert element.get("start_index") == "5" + assert element.get("end_index") == "20" + + +def test_from_element(): + element = Element("StreamingAnnotationContent") + element.set("file_id", "12345") + element.set("quote", "This is a quote.") + element.set("start_index", "5") + element.set("end_index", "20") + annotation = StreamingAnnotationContent.from_element(element) + assert annotation.file_id == "12345" + assert annotation.quote == "This is a quote." 
+ assert annotation.start_index == 5 + assert annotation.end_index == 20 + + +def test_to_dict(): + annotation = StreamingAnnotationContent(file_id="12345", quote="This is a quote.", start_index=5, end_index=20) + assert annotation.to_dict() == { + "type": "text", + "text": f"{annotation.file_id} {annotation.quote} (Start Index={annotation.start_index}->End Index={annotation.end_index})", # noqa: E501 + } + + +@pytest.mark.parametrize("annotation", test_cases) +def test_element_roundtrip(annotation): + element = annotation.to_element() + new_annotation = StreamingAnnotationContent.from_element(element) + assert new_annotation == annotation + + +@pytest.mark.parametrize("annotation", test_cases) +def test_to_dict_call(annotation): + expected_dict = { + "type": "text", + "text": f"{annotation.file_id} {annotation.quote} (Start Index={annotation.start_index}->End Index={annotation.end_index})", # noqa: E501 + } + assert annotation.to_dict() == expected_dict diff --git a/python/tests/unit/contents/test_streaming_chat_message_content.py b/python/tests/unit/contents/test_streaming_chat_message_content.py index 759a4187987b..d9651a45489d 100644 --- a/python/tests/unit/contents/test_streaming_chat_message_content.py +++ b/python/tests/unit/contents/test_streaming_chat_message_content.py @@ -268,6 +268,10 @@ def test_scmc_add(): assert len(combined.items) == 1 assert len(combined.inner_content) == 2 + # Make sure the original inner content is preserved + assert message1.inner_content == "source1" + assert message2.inner_content == "source2" + def test_scmc_add_three(): message1 = StreamingChatMessageContent( @@ -360,6 +364,10 @@ def test_scmc_add_different_items_same_type(message1, message2): combined = message1 + message2 assert len(combined.items) == 2 + # Make sure the original items are preserved + assert len(message1.items) == 1 + assert len(message2.items) == 1 + @pytest.mark.parametrize( "message1, message2", diff --git 
a/python/tests/unit/contents/test_streaming_file_reference_content.py b/python/tests/unit/contents/test_streaming_file_reference_content.py new file mode 100644 index 000000000000..0a88428789b9 --- /dev/null +++ b/python/tests/unit/contents/test_streaming_file_reference_content.py @@ -0,0 +1,76 @@ +# Copyright (c) Microsoft. All rights reserved. + +from xml.etree.ElementTree import Element + +import pytest + +from semantic_kernel.contents.streaming_file_reference_content import StreamingFileReferenceContent + + +def test_create_empty(): + file_reference = StreamingFileReferenceContent() + assert file_reference.file_id is None + + +def test_create_file_id(): + file_reference = StreamingFileReferenceContent(file_id="12345") + assert file_reference.file_id == "12345" + + +def test_update_file_id(): + file_reference = StreamingFileReferenceContent() + file_reference.file_id = "12345" + assert file_reference.file_id == "12345" + + +def test_to_str(): + file_reference = StreamingFileReferenceContent(file_id="12345") + assert str(file_reference) == "StreamingFileReferenceContent(file_id=12345)" + + +def test_to_element(): + file_reference = StreamingFileReferenceContent(file_id="12345") + element = file_reference.to_element() + assert element.tag == "streaming_file_reference" + assert element.get("file_id") == "12345" + + +def test_from_element(): + element = Element("StreamingFileReferenceContent") + element.set("file_id", "12345") + file_reference = StreamingFileReferenceContent.from_element(element) + assert file_reference.file_id == "12345" + + +def test_to_dict_simple(): + file_reference = StreamingFileReferenceContent(file_id="12345") + assert file_reference.to_dict() == { + "file_id": "12345", + } + + +@pytest.mark.parametrize( + "file_reference", + [ + pytest.param(StreamingFileReferenceContent(file_id="12345"), id="file_id"), + pytest.param(StreamingFileReferenceContent(), id="empty"), + ], +) +def test_element_roundtrip(file_reference): + element = 
file_reference.to_element() + new_file_reference = StreamingFileReferenceContent.from_element(element) + assert new_file_reference == file_reference + + +@pytest.mark.parametrize( + "file_reference", + [ + pytest.param(StreamingFileReferenceContent(file_id="12345"), id="file_id"), + pytest.param(StreamingFileReferenceContent(), id="empty"), + ], +) +def test_to_dict(file_reference): + expected_dict = { + "file_id": file_reference.file_id, + } + assert file_reference.to_dict() == expected_dict diff --git a/python/tests/unit/core_plugins/test_sessions_python_plugin.py b/python/tests/unit/core_plugins/test_sessions_python_plugin.py index ee7beeec4799..e04325543787 100644 --- a/python/tests/unit/core_plugins/test_sessions_python_plugin.py +++ b/python/tests/unit/core_plugins/test_sessions_python_plugin.py @@ -608,7 +608,7 @@ async def token_cb(): plugin = SessionsPythonTool(auth_callback=token_cb) with pytest.raises( - FunctionExecutionException, match="Failed to retrieve the client auth token with messages: Could not get token." + FunctionExecutionException, match="Failed to retrieve the client auth token with message: Could not get token." 
): await plugin._ensure_auth_token() diff --git a/python/tests/unit/functions/test_kernel_arguments.py b/python/tests/unit/functions/test_kernel_arguments.py index 155fda0ec079..9e07a3a14ce7 100644 --- a/python/tests/unit/functions/test_kernel_arguments.py +++ b/python/tests/unit/functions/test_kernel_arguments.py @@ -35,3 +35,14 @@ def test_kernel_arguments_with_execution_settings(): kargs = KernelArguments(settings=[test_pes]) assert kargs is not None assert kargs.execution_settings == {"test": test_pes} + + +def test_kernel_arguments_bool(): + # An empty KernelArguments object should return False + assert not KernelArguments() + # An KernelArguments object with keyword arguments should return True + assert KernelArguments(input=10) + # An KernelArguments object with execution_settings should return True + assert KernelArguments(settings=PromptExecutionSettings(service_id="test")) + # An KernelArguments object with both keyword arguments and execution_settings should return True + assert KernelArguments(input=10, settings=PromptExecutionSettings(service_id="test")) diff --git a/python/tests/unit/schema/test_schema_builder.py b/python/tests/unit/schema/test_schema_builder.py index 015ad12a337c..f6f2b4071983 100644 --- a/python/tests/unit/schema/test_schema_builder.py +++ b/python/tests/unit/schema/test_schema_builder.py @@ -7,6 +7,7 @@ import pytest +from semantic_kernel.connectors.utils.structured_output_schema import generate_structured_output_response_format_schema from semantic_kernel.kernel_pydantic import KernelBaseModel from semantic_kernel.schema.kernel_json_schema_builder import KernelJsonSchemaBuilder @@ -65,6 +66,26 @@ class MockModel: } +class PydanticStep(KernelBaseModel): + explanation: str + output: str + + +class PydanticReasoning(KernelBaseModel): + steps: list[PydanticStep] + final_answer: str + + +class NonPydanticStep: + explanation: str + output: str + + +class NonPydanticReasoning: + steps: list[NonPydanticStep] + final_answer: str + + def 
test_build_with_kernel_base_model(): expected_schema = { "type": "object", @@ -368,3 +389,69 @@ def test_handle_complex_type(): schema = KernelJsonSchemaBuilder.handle_complex_type(str, "Description") expected_schema = {"type": "string", "description": "Description"} assert schema == expected_schema + + +def test_build_schema_with_pydantic_structured_output(): + schema = KernelJsonSchemaBuilder.build(parameter_type=PydanticReasoning, structured_output=True) + structured_output_schema = generate_structured_output_response_format_schema(name="Reasoning", schema=schema) + + expected_schema = { + "type": "json_schema", + "json_schema": { + "name": "Reasoning", + "schema": { + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": {"explanation": {"type": "string"}, "output": {"type": "string"}}, + "required": ["explanation", "output"], + "additionalProperties": False, + }, + }, + "final_answer": {"type": "string"}, + }, + "required": ["steps", "final_answer"], + "additionalProperties": False, + }, + "strict": True, + }, + } + + assert structured_output_schema == expected_schema + + +def test_build_schema_with_nonpydantic_structured_output(): + schema = KernelJsonSchemaBuilder.build(parameter_type=NonPydanticReasoning, structured_output=True) + structured_output_schema = generate_structured_output_response_format_schema( + name="NonPydanticReasoning", schema=schema + ) + + expected_schema = { + "type": "json_schema", + "json_schema": { + "name": "NonPydanticReasoning", + "schema": { + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": {"explanation": {"type": "string"}, "output": {"type": "string"}}, + "required": ["explanation", "output"], + "additionalProperties": False, + }, + }, + "final_answer": {"type": "string"}, + }, + "required": ["steps", "final_answer"], + "additionalProperties": False, + }, + "strict": True, + }, + } + + assert 
structured_output_schema == expected_schema diff --git a/python/tests/unit/utils/model_diagnostics/conftest.py b/python/tests/unit/utils/model_diagnostics/conftest.py new file mode 100644 index 000000000000..720abb17dfe4 --- /dev/null +++ b/python/tests/unit/utils/model_diagnostics/conftest.py @@ -0,0 +1,98 @@ +# Copyright (c) Microsoft. All rights reserved. + + +import sys +from collections.abc import AsyncGenerator +from typing import Any, ClassVar + +import pytest + +if sys.version_info >= (3, 12): + from typing import override # pragma: no cover +else: + from typing_extensions import override # pragma: no cover + +import semantic_kernel +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase +from semantic_kernel.contents.chat_history import ChatHistory +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.utils.telemetry.model_diagnostics.model_diagnostics_settings import ModelDiagnosticSettings + + +@pytest.fixture() +def model_diagnostics_unit_test_env(monkeypatch): + """Fixture to set environment variables for Model Diagnostics Unit Tests.""" + env_vars = { + "SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS": "true", + "SEMANTICKERNEL_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE": "true", + } + + for key, value in env_vars.items(): + monkeypatch.setenv(key, value) + + # Need to reload the settings to pick up the new environment variables since the + # settings are loaded at import time and this fixture is called after the import + 
semantic_kernel.utils.telemetry.model_diagnostics.decorators.MODEL_DIAGNOSTICS_SETTINGS = ( + ModelDiagnosticSettings.create() + ) + + +@pytest.fixture() +def service_env_vars(monkeypatch, request): + """Fixture to set environment variables for AI Service Unit Tests.""" + for key, value in request.param.items(): + monkeypatch.setenv(key, value) + + +class MockChatCompletion(ChatCompletionClientBase): + MODEL_PROVIDER_NAME: ClassVar[str] = "mock" + + @override + async def _inner_get_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> list["ChatMessageContent"]: + return [] + + @override + async def _inner_get_streaming_chat_message_contents( + self, + chat_history: "ChatHistory", + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingChatMessageContent"], Any]: + yield [] + + @override + def service_url(self) -> str | None: + return "http://mock-service-url" + + +class MockTextCompletion(TextCompletionClientBase): + MODEL_PROVIDER_NAME: ClassVar[str] = "mock" + + @override + async def _inner_get_text_contents( + self, + prompt: str, + settings: "PromptExecutionSettings", + ) -> list["TextContent"]: + return [] + + @override + async def _inner_get_streaming_text_contents( + self, + prompt: str, + settings: "PromptExecutionSettings", + ) -> AsyncGenerator[list["StreamingTextContent"], Any]: + yield [] + + @override + def service_url(self) -> str | None: + # Returning None to test the case where the service URL is not available + return None diff --git a/python/tests/unit/utils/model_diagnostics/test_decorated.py b/python/tests/unit/utils/model_diagnostics/test_decorated.py new file mode 100644 index 000000000000..3d28d64e2b52 --- /dev/null +++ b/python/tests/unit/utils/model_diagnostics/test_decorated.py @@ -0,0 +1,151 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import pytest + +from semantic_kernel.connectors.ai.anthropic.services.anthropic_chat_completion import AnthropicChatCompletion +from semantic_kernel.connectors.ai.azure_ai_inference.services.azure_ai_inference_chat_completion import ( + AzureAIInferenceChatCompletion, +) +from semantic_kernel.connectors.ai.google.google_ai.services.google_ai_chat_completion import GoogleAIChatCompletion +from semantic_kernel.connectors.ai.google.google_ai.services.google_ai_text_completion import GoogleAITextCompletion +from semantic_kernel.connectors.ai.google.vertex_ai.services.vertex_ai_chat_completion import VertexAIChatCompletion +from semantic_kernel.connectors.ai.google.vertex_ai.services.vertex_ai_text_completion import VertexAITextCompletion +from semantic_kernel.connectors.ai.mistral_ai.services.mistral_ai_chat_completion import MistralAIChatCompletion +from semantic_kernel.connectors.ai.ollama.services.ollama_chat_completion import OllamaChatCompletion +from semantic_kernel.connectors.ai.ollama.services.ollama_text_completion import OllamaTextCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion +from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import OpenAITextCompletion + +pytestmark = pytest.mark.parametrize( + "decorated_method, expected_attribute", + [ + # OpenAIChatCompletion + pytest.param( + OpenAIChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="OpenAIChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + OpenAIChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="OpenAIChatCompletion._inner_get_streaming_chat_message_contents", + ), + # OpenAITextCompletion + pytest.param( + OpenAITextCompletion._inner_get_text_contents, + "__model_diagnostics_text_completion__", + id="OpenAITextCompletion._inner_get_text_contents", + ), + pytest.param( + 
OpenAITextCompletion._inner_get_streaming_text_contents, + "__model_diagnostics_streaming_text_completion__", + id="OpenAITextCompletion._inner_get_streaming_text_contents", + ), + # OllamaChatCompletion + pytest.param( + OllamaChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="OllamaChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + OllamaChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="OllamaChatCompletion._inner_get_streaming_chat_message_contents", + ), + # OllamaTextCompletion + pytest.param( + OllamaTextCompletion._inner_get_text_contents, + "__model_diagnostics_text_completion__", + id="OllamaTextCompletion._inner_get_text_contents", + ), + pytest.param( + OllamaTextCompletion._inner_get_streaming_text_contents, + "__model_diagnostics_streaming_text_completion__", + id="OllamaTextCompletion._inner_get_streaming_text_contents", + ), + # MistralAIChatCompletion + pytest.param( + MistralAIChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="MistralAIChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + MistralAIChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="MistralAIChatCompletion._inner_get_streaming_chat_message_contents", + ), + # VertexAIChatCompletion + pytest.param( + VertexAIChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="VertexAIChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + VertexAIChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="VertexAIChatCompletion._inner_get_streaming_chat_message_contents", + ), + # VertexAITextCompletion + pytest.param( + VertexAITextCompletion._inner_get_text_contents, + "__model_diagnostics_text_completion__", + 
id="VertexAITextCompletion._inner_get_text_contents", + ), + pytest.param( + VertexAITextCompletion._inner_get_streaming_text_contents, + "__model_diagnostics_streaming_text_completion__", + id="VertexAITextCompletion._inner_get_streaming_text_contents", + ), + # GoogleAIChatCompletion + pytest.param( + GoogleAIChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="GoogleAIChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + GoogleAIChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="GoogleAIChatCompletion._inner_get_streaming_chat_message_contents", + ), + # GoogleAITextCompletion + pytest.param( + GoogleAITextCompletion._inner_get_text_contents, + "__model_diagnostics_text_completion__", + id="GoogleAITextCompletion._inner_get_text_contents", + ), + pytest.param( + GoogleAITextCompletion._inner_get_streaming_text_contents, + "__model_diagnostics_streaming_text_completion__", + id="GoogleAITextCompletion._inner_get_streaming_text_contents", + ), + # AzureAIInferenceChatCompletion + pytest.param( + AzureAIInferenceChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="AzureAIInferenceChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + AzureAIInferenceChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="AzureAIInferenceChatCompletion._inner_get_streaming_chat_message_contents", + ), + # AnthropicChatCompletion + pytest.param( + AnthropicChatCompletion._inner_get_chat_message_contents, + "__model_diagnostics_chat_completion__", + id="AnthropicChatCompletion._inner_get_chat_message_contents", + ), + pytest.param( + AnthropicChatCompletion._inner_get_streaming_chat_message_contents, + "__model_diagnostics_streaming_chat_completion__", + id="AnthropicChatCompletion._inner_get_streaming_chat_message_contents", + ), + 
], +) + + +def test_decorated(decorated_method, expected_attribute): + """Test that the connectors are being decorated properly with the model diagnostics decorators.""" + assert hasattr(decorated_method, expected_attribute) and getattr( + decorated_method, expected_attribute + ), f"{decorated_method} should be decorated with the appropriate model diagnostics decorator." diff --git a/python/tests/unit/utils/model_diagnostics/test_trace_chat_completion.py b/python/tests/unit/utils/model_diagnostics/test_trace_chat_completion.py new file mode 100644 index 000000000000..4b14c1c92156 --- /dev/null +++ b/python/tests/unit/utils/model_diagnostics/test_trace_chat_completion.py @@ -0,0 +1,174 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import patch + +import pytest +from opentelemetry.trace import StatusCode + +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.contents.utils.finish_reason import FinishReason +from semantic_kernel.exceptions.service_exceptions import ServiceResponseException +from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + CHAT_COMPLETION_OPERATION, + _messages_to_openai_format, + trace_chat_completion, +) +from tests.unit.utils.model_diagnostics.conftest import MockChatCompletion + +pytestmark = pytest.mark.parametrize( + "execution_settings, mock_response", + [ + pytest.param( + PromptExecutionSettings( + extension_data={ + "max_tokens": 1000, + "temperature": 0.5, + "top_p": 0.9, + } + ), + [ + ChatMessageContent( + role=AuthorRole.ASSISTANT, + ai_model_id="ai_model_id", + content="Test content", + metadata={"id": 
"test_id"}, + finish_reason=FinishReason.STOP, + ) + ], + id="test_execution_settings_with_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + ChatMessageContent( + role=AuthorRole.ASSISTANT, + ai_model_id="ai_model_id", + metadata={"id": "test_id"}, + finish_reason=FinishReason.STOP, + ) + ], + id="test_execution_settings_no_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + ChatMessageContent( + role=AuthorRole.ASSISTANT, + ai_model_id="ai_model_id", + metadata={}, + finish_reason=FinishReason.STOP, + ) + ], + id="test_chat_message_content_no_metadata", + ), + pytest.param( + PromptExecutionSettings(), + [ + ChatMessageContent( + role=AuthorRole.ASSISTANT, + ai_model_id="ai_model_id", + metadata={"id": "test_id"}, + ) + ], + id="test_chat_message_content_no_finish_reason", + ), + ], +) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_chat_completion( + mock_span, + execution_settings, + mock_response, + chat_history, + model_diagnostics_unit_test_env, +): + # Setup + chat_completion: ChatCompletionClientBase = MockChatCompletion(ai_model_id="ai_model_id") + + with patch.object(MockChatCompletion, "_inner_get_chat_message_contents", return_value=mock_response): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockChatCompletion._inner_get_chat_message_contents = trace_chat_completion( + MockChatCompletion.MODEL_PROVIDER_NAME + )(chat_completion._inner_get_chat_message_contents) + + results: list[ChatMessageContent] = await chat_completion.get_chat_message_contents( + chat_history, execution_settings + ) + + assert results == mock_response + + # Before the call to the model + mock_span.set_attributes.assert_called_with({ + gen_ai_attributes.OPERATION: CHAT_COMPLETION_OPERATION, + gen_ai_attributes.SYSTEM: MockChatCompletion.MODEL_PROVIDER_NAME, + 
gen_ai_attributes.MODEL: chat_completion.ai_model_id, + }) + + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ADDRESS, chat_completion.service_url()) + + # No all connectors take the same parameters + if execution_settings.extension_data.get("max_tokens") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.MAX_TOKENS, execution_settings.extension_data["max_tokens"] + ) + if execution_settings.extension_data.get("temperature") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.TEMPERATURE, execution_settings.extension_data["temperature"] + ) + if execution_settings.extension_data.get("top_p") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.TOP_P, execution_settings.extension_data["top_p"]) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.PROMPT_EVENT, + {gen_ai_attributes.PROMPT_EVENT_PROMPT: _messages_to_openai_format(chat_history)}, + ) + + # After the call to the model + # Not all connectors return the same metadata + if mock_response[0].metadata.get("id") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.RESPONSE_ID, mock_response[0].metadata["id"]) + if any(completion.finish_reason is not None for completion in mock_response): + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.FINISH_REASON, + ",".join([str(completion.finish_reason) for completion in mock_response]), + ) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.COMPLETION_EVENT, + {gen_ai_attributes.COMPLETION_EVENT_COMPLETION: _messages_to_openai_format(mock_response)}, + ) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_chat_completion_exception( + mock_span, + execution_settings, + mock_response, + chat_history, + model_diagnostics_unit_test_env, +): + # Setup + chat_completion: ChatCompletionClientBase = 
MockChatCompletion(ai_model_id="ai_model_id") + + with patch.object(MockChatCompletion, "_inner_get_chat_message_contents", side_effect=ServiceResponseException()): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockChatCompletion._inner_get_chat_message_contents = trace_chat_completion( + MockChatCompletion.MODEL_PROVIDER_NAME + )(chat_completion._inner_get_chat_message_contents) + + with pytest.raises(ServiceResponseException): + await chat_completion.get_chat_message_contents(chat_history, execution_settings) + + exception = ServiceResponseException() + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ERROR_TYPE, str(type(exception))) + mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) + + mock_span.end.assert_any_call() diff --git a/python/tests/unit/utils/model_diagnostics/test_trace_streaming_chat_completion.py b/python/tests/unit/utils/model_diagnostics/test_trace_streaming_chat_completion.py new file mode 100644 index 000000000000..911cd622cf9e --- /dev/null +++ b/python/tests/unit/utils/model_diagnostics/test_trace_streaming_chat_completion.py @@ -0,0 +1,190 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from collections.abc import AsyncGenerator +from functools import reduce +from unittest.mock import MagicMock, patch + +import pytest +from opentelemetry.trace import StatusCode + +from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent +from semantic_kernel.contents.utils.author_role import AuthorRole +from semantic_kernel.contents.utils.finish_reason import FinishReason +from semantic_kernel.exceptions.service_exceptions import ServiceResponseException +from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + CHAT_STREAMING_COMPLETION_OPERATION, + _messages_to_openai_format, + trace_streaming_chat_completion, +) +from tests.unit.utils.model_diagnostics.conftest import MockChatCompletion + +pytestmark = pytest.mark.parametrize( + "execution_settings, mock_response", + [ + pytest.param( + PromptExecutionSettings( + extension_data={ + "max_tokens": 1000, + "temperature": 0.5, + "top_p": 0.9, + } + ), + [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + ai_model_id="ai_model_id", + content="Test content", + metadata={"id": "test_id"}, + finish_reason=FinishReason.STOP, + ) + ], + id="test_execution_settings_with_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + ai_model_id="ai_model_id", + metadata={"id": "test_id"}, + finish_reason=FinishReason.STOP, + ) + ], + id="test_execution_settings_no_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + ai_model_id="ai_model_id", + metadata={}, + 
finish_reason=FinishReason.STOP, + ) + ], + id="test_chat_message_content_no_metadata", + ), + pytest.param( + PromptExecutionSettings(), + [ + StreamingChatMessageContent( + role=AuthorRole.ASSISTANT, + choice_index=0, + ai_model_id="ai_model_id", + metadata={"id": "test_id"}, + ) + ], + id="test_chat_message_content_no_finish_reason", + ), + ], +) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_streaming_chat_completion( + mock_span, + execution_settings, + mock_response, + chat_history, + model_diagnostics_unit_test_env, +): + # Setup + chat_completion: ChatCompletionClientBase = MockChatCompletion(ai_model_id="ai_model_id") + iterable = MagicMock(spec=AsyncGenerator) + iterable.__aiter__.return_value = [mock_response] + + with patch.object(MockChatCompletion, "_inner_get_streaming_chat_message_contents", return_value=iterable): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockChatCompletion._inner_get_streaming_chat_message_contents = trace_streaming_chat_completion( + MockChatCompletion.MODEL_PROVIDER_NAME + )(chat_completion._inner_get_streaming_chat_message_contents) + + updates = [] + async for update in chat_completion._inner_get_streaming_chat_message_contents( + chat_history, execution_settings + ): + updates.append(update) + updates_flatten = [reduce(lambda x, y: x + y, messages) for messages in updates] + + assert updates_flatten == mock_response + + # Before the call to the model + mock_span.set_attributes.assert_called_with({ + gen_ai_attributes.OPERATION: CHAT_STREAMING_COMPLETION_OPERATION, + gen_ai_attributes.SYSTEM: MockChatCompletion.MODEL_PROVIDER_NAME, + gen_ai_attributes.MODEL: chat_completion.ai_model_id, + }) + + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ADDRESS, chat_completion.service_url()) + + # No all connectors take the same parameters + if 
execution_settings.extension_data.get("max_tokens") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.MAX_TOKENS, execution_settings.extension_data["max_tokens"] + ) + if execution_settings.extension_data.get("temperature") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.TEMPERATURE, execution_settings.extension_data["temperature"] + ) + if execution_settings.extension_data.get("top_p") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.TOP_P, execution_settings.extension_data["top_p"]) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.PROMPT_EVENT, + {gen_ai_attributes.PROMPT_EVENT_PROMPT: _messages_to_openai_format(chat_history)}, + ) + + # After the call to the model + # Not all connectors return the same metadata + if mock_response[0].metadata.get("id") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.RESPONSE_ID, mock_response[0].metadata["id"]) + if any(completion.finish_reason is not None for completion in mock_response): + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.FINISH_REASON, + ",".join([str(completion.finish_reason) for completion in mock_response]), + ) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.COMPLETION_EVENT, + {gen_ai_attributes.COMPLETION_EVENT_COMPLETION: _messages_to_openai_format(mock_response)}, + ) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_streaming_chat_completion_exception( + mock_span, + execution_settings, + mock_response, + chat_history, + model_diagnostics_unit_test_env, +): + # Setup + chat_completion: ChatCompletionClientBase = MockChatCompletion(ai_model_id="ai_model_id") + + with patch.object( + MockChatCompletion, "_inner_get_streaming_chat_message_contents", side_effect=ServiceResponseException() + ): + # We need to reapply the decorator to the method since the 
mock will not have the decorator applied + MockChatCompletion._inner_get_streaming_chat_message_contents = trace_streaming_chat_completion( + MockChatCompletion.MODEL_PROVIDER_NAME + )(chat_completion._inner_get_streaming_chat_message_contents) + + with pytest.raises(ServiceResponseException): + async for update in chat_completion._inner_get_streaming_chat_message_contents( + chat_history, execution_settings + ): + pass + + exception = ServiceResponseException() + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ERROR_TYPE, str(type(exception))) + mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) + + mock_span.end.assert_any_call() diff --git a/python/tests/unit/utils/model_diagnostics/test_trace_streaming_text_completion.py b/python/tests/unit/utils/model_diagnostics/test_trace_streaming_text_completion.py new file mode 100644 index 000000000000..2a5fefc73e6b --- /dev/null +++ b/python/tests/unit/utils/model_diagnostics/test_trace_streaming_text_completion.py @@ -0,0 +1,162 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from collections.abc import AsyncGenerator +from functools import reduce +from unittest.mock import ANY, MagicMock, patch + +import pytest +from opentelemetry.trace import StatusCode + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase +from semantic_kernel.contents.streaming_text_content import StreamingTextContent +from semantic_kernel.exceptions.service_exceptions import ServiceResponseException +from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + TEXT_STREAMING_COMPLETION_OPERATION, + _messages_to_openai_format, + trace_streaming_text_completion, +) +from tests.unit.utils.model_diagnostics.conftest import MockTextCompletion + +pytestmark = pytest.mark.parametrize( + "execution_settings, mock_response", + [ + pytest.param( + PromptExecutionSettings( + extension_data={ + "max_tokens": 1000, + "temperature": 0.5, + "top_p": 0.9, + } + ), + [ + StreamingTextContent( + choice_index=0, + ai_model_id="ai_model_id", + text="Test content", + metadata={"id": "test_id"}, + ) + ], + id="test_execution_settings_with_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + StreamingTextContent( + choice_index=0, + ai_model_id="ai_model_id", + text="Test content", + metadata={"id": "test_id"}, + ) + ], + id="test_execution_settings_no_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + StreamingTextContent( + choice_index=0, + ai_model_id="ai_model_id", + text="Test content", + metadata={}, + ) + ], + id="test_text_content_no_metadata", + ), + ], +) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_streaming_text_completion( + mock_span, + execution_settings, + mock_response, + 
prompt, + model_diagnostics_unit_test_env, +): + # Setup + text_completion: TextCompletionClientBase = MockTextCompletion(ai_model_id="ai_model_id") + iterable = MagicMock(spec=AsyncGenerator) + iterable.__aiter__.return_value = [mock_response] + + with patch.object(MockTextCompletion, "_inner_get_streaming_text_contents", return_value=iterable): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockTextCompletion._inner_get_streaming_text_contents = trace_streaming_text_completion( + MockTextCompletion.MODEL_PROVIDER_NAME + )(text_completion._inner_get_streaming_text_contents) + + updates = [] + async for update in text_completion.get_streaming_text_contents(prompt=prompt, settings=execution_settings): + updates.append(update) + updates_flatten = [reduce(lambda x, y: x + y, update) for update in updates] + + assert updates_flatten == mock_response + + # Before the call to the model + mock_span.set_attributes.assert_called_with({ + gen_ai_attributes.OPERATION: TEXT_STREAMING_COMPLETION_OPERATION, + gen_ai_attributes.SYSTEM: MockTextCompletion.MODEL_PROVIDER_NAME, + gen_ai_attributes.MODEL: text_completion.ai_model_id, + }) + + with pytest.raises(AssertionError): + # The service_url attribute is not set for text completion + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ADDRESS, ANY) + + # No all connectors take the same parameters + if execution_settings.extension_data.get("max_tokens") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.MAX_TOKENS, execution_settings.extension_data["max_tokens"] + ) + if execution_settings.extension_data.get("temperature") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.TEMPERATURE, execution_settings.extension_data["temperature"] + ) + if execution_settings.extension_data.get("top_p") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.TOP_P, execution_settings.extension_data["top_p"]) + + 
mock_span.add_event.assert_any_call( + gen_ai_attributes.PROMPT_EVENT, {gen_ai_attributes.PROMPT_EVENT_PROMPT: prompt} + ) + + # After the call to the model + # Not all connectors return the same metadata + if mock_response[0].metadata.get("id") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.RESPONSE_ID, mock_response[0].metadata["id"]) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.COMPLETION_EVENT, + {gen_ai_attributes.COMPLETION_EVENT_COMPLETION: _messages_to_openai_format(mock_response)}, + ) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_streaming_text_completion_exception( + mock_span, + execution_settings, + mock_response, + prompt, + model_diagnostics_unit_test_env, +): + # Setup + text_completion: TextCompletionClientBase = MockTextCompletion(ai_model_id="ai_model_id") + + with patch.object(MockTextCompletion, "_inner_get_streaming_text_contents", side_effect=ServiceResponseException()): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockTextCompletion._inner_get_streaming_text_contents = trace_streaming_text_completion( + MockTextCompletion.MODEL_PROVIDER_NAME + )(text_completion._inner_get_streaming_text_contents) + + with pytest.raises(ServiceResponseException): + async for update in text_completion.get_streaming_text_contents(prompt=prompt, settings=execution_settings): + pass + + exception = ServiceResponseException() + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ERROR_TYPE, str(type(exception))) + mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) + + mock_span.end.assert_any_call() diff --git a/python/tests/unit/utils/model_diagnostics/test_trace_text_completion.py b/python/tests/unit/utils/model_diagnostics/test_trace_text_completion.py new file mode 100644 index 000000000000..fe6d30f25830 --- /dev/null 
+++ b/python/tests/unit/utils/model_diagnostics/test_trace_text_completion.py @@ -0,0 +1,154 @@ +# Copyright (c) Microsoft. All rights reserved. + +from unittest.mock import ANY, patch + +import pytest +from opentelemetry.trace import StatusCode + +from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings +from semantic_kernel.connectors.ai.text_completion_client_base import TextCompletionClientBase +from semantic_kernel.contents.chat_message_content import ChatMessageContent +from semantic_kernel.contents.text_content import TextContent +from semantic_kernel.exceptions.service_exceptions import ServiceResponseException +from semantic_kernel.utils.telemetry.model_diagnostics import gen_ai_attributes +from semantic_kernel.utils.telemetry.model_diagnostics.decorators import ( + TEXT_COMPLETION_OPERATION, + _messages_to_openai_format, + trace_text_completion, +) +from tests.unit.utils.model_diagnostics.conftest import MockTextCompletion + +pytestmark = pytest.mark.parametrize( + "execution_settings, mock_response", + [ + pytest.param( + PromptExecutionSettings( + extension_data={ + "max_tokens": 1000, + "temperature": 0.5, + "top_p": 0.9, + } + ), + [ + TextContent( + ai_model_id="ai_model_id", + text="Test content", + metadata={"id": "test_id"}, + ) + ], + id="test_execution_settings_with_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + TextContent( + ai_model_id="ai_model_id", + text="Test content", + metadata={"id": "test_id"}, + ) + ], + id="test_execution_settings_no_extension_data", + ), + pytest.param( + PromptExecutionSettings(), + [ + TextContent( + ai_model_id="ai_model_id", + text="Test content", + metadata={}, + ) + ], + id="test_text_content_no_metadata", + ), + ], +) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_text_completion( + mock_span, + execution_settings, + mock_response, 
+ prompt, + model_diagnostics_unit_test_env, +): + # Setup + text_completion: TextCompletionClientBase = MockTextCompletion(ai_model_id="ai_model_id") + + with patch.object(MockTextCompletion, "_inner_get_text_contents", return_value=mock_response): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockTextCompletion._inner_get_text_contents = trace_text_completion(MockTextCompletion.MODEL_PROVIDER_NAME)( + text_completion._inner_get_text_contents + ) + + results: list[TextContent] = await text_completion.get_text_contents( + prompt=prompt, settings=execution_settings + ) + + assert results == mock_response + + # Before the call to the model + mock_span.set_attributes.assert_called_with({ + gen_ai_attributes.OPERATION: TEXT_COMPLETION_OPERATION, + gen_ai_attributes.SYSTEM: MockTextCompletion.MODEL_PROVIDER_NAME, + gen_ai_attributes.MODEL: text_completion.ai_model_id, + }) + + with pytest.raises(AssertionError): + # The service_url attribute is not set for text completion + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ADDRESS, ANY) + + # Not all connectors take the same parameters + if execution_settings.extension_data.get("max_tokens") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.MAX_TOKENS, execution_settings.extension_data["max_tokens"] + ) + if execution_settings.extension_data.get("temperature") is not None: + mock_span.set_attribute.assert_any_call( + gen_ai_attributes.TEMPERATURE, execution_settings.extension_data["temperature"] + ) + if execution_settings.extension_data.get("top_p") is not None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.TOP_P, execution_settings.extension_data["top_p"]) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.PROMPT_EVENT, {gen_ai_attributes.PROMPT_EVENT_PROMPT: prompt} + ) + + # After the call to the model + # Not all connectors return the same metadata + if mock_response[0].metadata.get("id") is not 
None: + mock_span.set_attribute.assert_any_call(gen_ai_attributes.RESPONSE_ID, mock_response[0].metadata["id"]) + + mock_span.add_event.assert_any_call( + gen_ai_attributes.COMPLETION_EVENT, + {gen_ai_attributes.COMPLETION_EVENT_COMPLETION: _messages_to_openai_format(mock_response)}, + ) + + +@pytest.mark.asyncio +@patch("opentelemetry.trace.INVALID_SPAN") # When no tracer provider is available, the span will be an INVALID_SPAN +async def test_trace_text_completion_exception( + mock_span, + execution_settings, + mock_response, + prompt, + model_diagnostics_unit_test_env, +): + # Setup + text_completion: TextCompletionClientBase = MockTextCompletion(ai_model_id="ai_model_id") + + with patch.object(MockTextCompletion, "_inner_get_text_contents", side_effect=ServiceResponseException()): + # We need to reapply the decorator to the method since the mock will not have the decorator applied + MockTextCompletion._inner_get_text_contents = trace_text_completion(MockTextCompletion.MODEL_PROVIDER_NAME)( + text_completion._inner_get_text_contents + ) + + with pytest.raises(ServiceResponseException): + await text_completion.get_text_contents(prompt=prompt, settings=execution_settings) + + exception = ServiceResponseException() + mock_span.set_attribute.assert_any_call(gen_ai_attributes.ERROR_TYPE, str(type(exception))) + mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) + + mock_span.end.assert_any_call() diff --git a/python/tests/unit/utils/test_tracing.py b/python/tests/unit/utils/test_tracing.py deleted file mode 100644 index 5d2c2f9e4bf6..000000000000 --- a/python/tests/unit/utils/test_tracing.py +++ /dev/null @@ -1,241 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from unittest.mock import patch - -import pytest -from openai.types import Completion as TextCompletion -from openai.types import CompletionChoice -from opentelemetry.trace import StatusCode - -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion -from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base import OpenAIChatCompletionBase -from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion import OpenAITextCompletion -from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings -from semantic_kernel.contents.chat_history import ChatHistory -from semantic_kernel.contents.chat_message_content import ChatMessageContent -from semantic_kernel.contents.text_content import TextContent -from semantic_kernel.contents.utils.author_role import AuthorRole -from semantic_kernel.contents.utils.finish_reason import FinishReason -from semantic_kernel.exceptions.service_exceptions import ServiceResponseException -from semantic_kernel.utils.telemetry.const import ( - CHAT_COMPLETION_OPERATION, - COMPLETION_EVENT, - COMPLETION_EVENT_COMPLETION, - ERROR_TYPE, - FINISH_REASON, - MAX_TOKENS, - MODEL, - OPERATION, - PROMPT_EVENT, - PROMPT_EVENT_PROMPT, - RESPONSE_ID, - SYSTEM, - TEMPERATURE, - TEXT_COMPLETION_OPERATION, - TOP_P, -) - -TEST_CONTENT = "Test content" -TEST_RESPONSE_ID = "dummy_id" -TEST_MAX_TOKENS = "1000" -TEST_MODEL = "dummy_model" -TEST_TEMPERATURE = "0.5" -TEST_TOP_P = "0.9" -TEST_CREATED_AT = 1 -TEST_TEXT_PROMPT = "Test prompt" -EXPECTED_CHAT_COMPLETION_EVENT_PAYLOAD = f'[{{"role": "assistant", "content": "{TEST_CONTENT}"}}]' -EXPECTED_TEXT_COMPLETION_EVENT_PAYLOAD = f'["{TEST_CONTENT}"]' - -TEST_CHAT_RESPONSE = [ - ChatMessageContent( - role=AuthorRole.ASSISTANT, - ai_model_id=TEST_MODEL, - content=TEST_CONTENT, - metadata={"id": TEST_RESPONSE_ID}, - finish_reason=FinishReason.STOP, - ) -] - -TEST_TEXT_RESPONSE = TextCompletion( - 
model=TEST_MODEL, - text=TEST_CONTENT, - id=TEST_RESPONSE_ID, - choices=[CompletionChoice(index=0, text=TEST_CONTENT, finish_reason="stop")], - created=TEST_CREATED_AT, - object="text_completion", -) - -TEST_TEXT_RESPONSE_METADATA = { - "id": TEST_RESPONSE_ID, - "created": TEST_CREATED_AT, - "system_fingerprint": None, - "logprobs": None, - "usage": None, -} - -EXPECTED_TEXT_CONTENT = [ - TextContent( - ai_model_id=TEST_MODEL, - text=TEST_CONTENT, - encoding=None, - metadata=TEST_TEXT_RESPONSE_METADATA, - inner_content=TEST_TEXT_RESPONSE, - ) -] - - -@pytest.mark.asyncio -@patch("semantic_kernel.utils.telemetry.decorators.are_model_diagnostics_enabled", return_value=True) -@patch("semantic_kernel.utils.telemetry.decorators.are_sensitive_events_enabled", return_value=True) -@patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base.OpenAIChatCompletionBase._send_chat_request", - return_value=TEST_CHAT_RESPONSE, -) -@patch("opentelemetry.trace.INVALID_SPAN") -async def test_trace_chat_completion( - mock_span, - mock_send_chat_request, - mock_sensitive_events_enabled, - mock_model_diagnostics_enabled, - openai_unit_test_env, -): - chat_completion = OpenAIChatCompletion(ai_model_id=TEST_MODEL, env_file_path="test.env") - extension_data = {"max_tokens": TEST_MAX_TOKENS, "temperature": TEST_TEMPERATURE, "top_p": TEST_TOP_P} - - results: list[ChatMessageContent] = await chat_completion.get_chat_message_contents( - chat_history=ChatHistory(), settings=PromptExecutionSettings(extension_data=extension_data) - ) - - assert results == TEST_CHAT_RESPONSE - - mock_span.set_attributes.assert_called_with( - { - OPERATION: CHAT_COMPLETION_OPERATION, - SYSTEM: OpenAIChatCompletionBase.MODEL_PROVIDER_NAME, - MODEL: TEST_MODEL, - } - ) - mock_span.set_attribute.assert_any_call(MAX_TOKENS, TEST_MAX_TOKENS) - mock_span.set_attribute.assert_any_call(TEMPERATURE, TEST_TEMPERATURE) - mock_span.set_attribute.assert_any_call(TOP_P, TEST_TOP_P) - 
mock_span.add_event.assert_any_call(PROMPT_EVENT, {PROMPT_EVENT_PROMPT: "[]"}) - - mock_span.set_attribute.assert_any_call(RESPONSE_ID, TEST_RESPONSE_ID) - mock_span.set_attribute.assert_any_call(FINISH_REASON, str(FinishReason.STOP)) - mock_span.add_event.assert_any_call( - COMPLETION_EVENT, {COMPLETION_EVENT_COMPLETION: EXPECTED_CHAT_COMPLETION_EVENT_PAYLOAD} - ) - - -@pytest.mark.asyncio -@patch("semantic_kernel.utils.telemetry.decorators.are_model_diagnostics_enabled", return_value=True) -@patch("semantic_kernel.utils.telemetry.decorators.are_sensitive_events_enabled", return_value=True) -@patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base.OpenAITextCompletionBase._send_request", - return_value=TEST_TEXT_RESPONSE, -) -@patch("opentelemetry.trace.INVALID_SPAN") -async def test_trace_text_completion( - mock_span, mock_send_request, mock_sensitive_events_enabled, mock_model_diagnostics_enabled, openai_unit_test_env -): - chat_completion = OpenAITextCompletion(ai_model_id=TEST_MODEL, env_file_path="test.env") - extension_data = {"max_tokens": TEST_MAX_TOKENS, "temperature": TEST_TEMPERATURE, "top_p": TEST_TOP_P} - - results: list[TextContent] = await chat_completion.get_text_contents( - prompt=TEST_TEXT_PROMPT, settings=PromptExecutionSettings(extension_data=extension_data) - ) - - assert results == EXPECTED_TEXT_CONTENT - - mock_span.set_attributes.assert_called_with( - { - OPERATION: TEXT_COMPLETION_OPERATION, - SYSTEM: OpenAIChatCompletionBase.MODEL_PROVIDER_NAME, - MODEL: TEST_MODEL, - } - ) - mock_span.set_attribute.assert_any_call(MAX_TOKENS, TEST_MAX_TOKENS) - mock_span.set_attribute.assert_any_call(TEMPERATURE, TEST_TEMPERATURE) - mock_span.set_attribute.assert_any_call(TOP_P, TEST_TOP_P) - mock_span.add_event.assert_any_call(PROMPT_EVENT, {PROMPT_EVENT_PROMPT: TEST_TEXT_PROMPT}) - - mock_span.set_attribute.assert_any_call(RESPONSE_ID, TEST_RESPONSE_ID) - mock_span.add_event.assert_any_call( - COMPLETION_EVENT, 
{COMPLETION_EVENT_COMPLETION: EXPECTED_TEXT_COMPLETION_EVENT_PAYLOAD} - ) - - -@pytest.mark.asyncio -@patch("semantic_kernel.utils.telemetry.decorators.are_model_diagnostics_enabled", return_value=True) -@patch("semantic_kernel.utils.telemetry.decorators.are_sensitive_events_enabled", return_value=True) -@patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion_base.OpenAIChatCompletionBase._send_chat_request", - side_effect=ServiceResponseException, -) -@patch("opentelemetry.trace.INVALID_SPAN") -async def test_trace_chat_completion_exception( - mock_span, - mock_send_chat_request, - mock_sensitive_events_enabled, - mock_model_diagnostics_enabled, - openai_unit_test_env, -): - chat_completion = OpenAIChatCompletion(ai_model_id=TEST_MODEL, env_file_path="test.env") - extension_data = {"max_tokens": TEST_MAX_TOKENS, "temperature": TEST_TEMPERATURE, "top_p": TEST_TOP_P} - - with pytest.raises(ServiceResponseException): - await chat_completion.get_chat_message_contents( - chat_history=ChatHistory(), settings=PromptExecutionSettings(extension_data=extension_data) - ) - - mock_span.set_attributes.assert_called_with( - { - OPERATION: CHAT_COMPLETION_OPERATION, - SYSTEM: OpenAIChatCompletionBase.MODEL_PROVIDER_NAME, - MODEL: TEST_MODEL, - } - ) - - exception = ServiceResponseException() - mock_span.set_attribute.assert_any_call(ERROR_TYPE, str(type(exception))) - mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) - - mock_span.end.assert_any_call() - - -@pytest.mark.asyncio -@patch("semantic_kernel.utils.telemetry.decorators.are_model_diagnostics_enabled", return_value=True) -@patch("semantic_kernel.utils.telemetry.decorators.are_sensitive_events_enabled", return_value=True) -@patch( - "semantic_kernel.connectors.ai.open_ai.services.open_ai_text_completion_base.OpenAITextCompletionBase._send_request", - side_effect=ServiceResponseException, -) -@patch("opentelemetry.trace.INVALID_SPAN") -async def 
test_trace_text_completion_exception( - mock_span, - mock_send_chat_request, - mock_sensitive_events_enabled, - mock_model_diagnostics_enabled, - openai_unit_test_env, -): - chat_completion = OpenAITextCompletion(ai_model_id=TEST_MODEL, env_file_path="test.env") - extension_data = {"max_tokens": TEST_MAX_TOKENS, "temperature": TEST_TEMPERATURE, "top_p": TEST_TOP_P} - - with pytest.raises(ServiceResponseException): - await chat_completion.get_text_contents( - prompt=TEST_TEXT_PROMPT, settings=PromptExecutionSettings(extension_data=extension_data) - ) - - mock_span.set_attributes.assert_called_with( - { - OPERATION: TEXT_COMPLETION_OPERATION, - SYSTEM: OpenAIChatCompletionBase.MODEL_PROVIDER_NAME, - MODEL: TEST_MODEL, - } - ) - - exception = ServiceResponseException() - mock_span.set_attribute.assert_any_call(ERROR_TYPE, str(type(exception))) - mock_span.set_status.assert_any_call(StatusCode.ERROR, repr(exception)) - - mock_span.end.assert_any_call() diff --git a/python/uv.lock b/python/uv.lock new file mode 100644 index 000000000000..b6168f0fbb68 --- /dev/null +++ b/python/uv.lock @@ -0,0 +1,5271 @@ +version = 1 +requires-python = ">=3.10, <3.13" +resolution-markers = [ + "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version < '3.11' and sys_platform == 'darwin'", + "python_full_version == '3.11.*' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and sys_platform == 'darwin'", + "python_full_version < '3.11' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'linux'", + "python_full_version == '3.11.*' and sys_platform == 'linux'", + "python_full_version >= '3.12' and sys_platform == 'linux'", + "python_full_version < '3.11' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version < '3.11' and 
sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version >= '3.12' and sys_platform == 'win32'", +] +supported-markers = [ + "sys_platform == 'darwin'", + "sys_platform == 'linux'", + "sys_platform == 'win32'", +] + +[[package]] +name = "accelerate" +version = "0.33.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "psutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "safetensors", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d4/24/5e813a41495ec7fdbc6a0f08e38c099caccf49147b8cd84053f4c3007c35/accelerate-0.33.0.tar.gz", hash = "sha256:11ba481ed6ea09191775df55ce464aeeba67a024bd0261a44b77b30fb439e26a", size = 314567 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/33/b6b4ad5efa8b9f4275d4ed17ff8a44c97276171341ba565fdffb0e3dc5e8/accelerate-0.33.0-py3-none-any.whl", hash = "sha256:0a7f33d60ba09afabd028d4f0856dd19c5a734b7a596d637d9dd6e3d0eadbaf3", size = 315131 }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f7/22bba300a16fd1cad99da1a23793fe43963ee326d012fdf852d0b4035955/aiohappyeyeballs-2.4.0.tar.gz", 
hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2", size = 16786 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/b6/58ea188899950d759a837f9a58b2aee1d1a380ea4d6211ce9b1823748851/aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd", size = 12155 }, +] + +[[package]] +name = "aiohttp" +version = "3.10.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "aiosignal", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "async-timeout", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "frozenlist", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "multidict", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "yarl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/28/ca549838018140b92a19001a8628578b0f2a3b38c16826212cc6f706e6d4/aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691", size = 7524360 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/4a/b27dd9b88fe22dde88742b341fd10251746a6ffcfe1c0b8b15b4a8cbd7c1/aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3", size = 587010 }, + { url = 
"https://files.pythonhosted.org/packages/de/a9/0f7e2b71549c9d641086c423526ae7a10de3b88d03ba104a3df153574d0d/aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6", size = 397698 }, + { url = "https://files.pythonhosted.org/packages/3b/52/26baa486e811c25b0cd16a494038260795459055568713f841e78f016481/aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699", size = 389052 }, + { url = "https://files.pythonhosted.org/packages/33/df/71ba374a3e925539cb2f6e6d4f5326e7b6b200fabbe1b3cc5e6368f07ce7/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6", size = 1248615 }, + { url = "https://files.pythonhosted.org/packages/67/02/bb89c1eba08a27fc844933bee505d63d480caf8e2816c06961d2941cd128/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1", size = 1282930 }, + { url = "https://files.pythonhosted.org/packages/db/36/07d8cfcc37f39c039f93a4210cc71dadacca003609946c63af23659ba656/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f", size = 1317250 }, + { url = "https://files.pythonhosted.org/packages/9a/44/cabeac994bef8ba521b552ae996928afc6ee1975a411385a07409811b01f/aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb", size = 1243212 }, + { url = "https://files.pythonhosted.org/packages/5a/11/23f1e31f5885ac72be52fd205981951dd2e4c87c5b1487cf82fde5bbd46c/aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91", size = 1213401 }, + { url = "https://files.pythonhosted.org/packages/3f/e7/6e69a0b0d896fbaf1192d492db4c21688e6c0d327486da610b0e8195bcc9/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f", size = 1212450 }, + { url = "https://files.pythonhosted.org/packages/a9/7f/a42f51074c723ea848254946aec118f1e59914a639dc8ba20b0c9247c195/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c", size = 1211324 }, + { url = "https://files.pythonhosted.org/packages/d5/43/c2f9d2f588ccef8f028f0a0c999b5ceafecbda50b943313faee7e91f3e03/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69", size = 1266838 }, + { url = "https://files.pythonhosted.org/packages/c1/a7/ff9f067ecb06896d859e4f2661667aee4bd9c616689599ff034b63cbd9d7/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3", size = 1285301 }, + { url = "https://files.pythonhosted.org/packages/9a/e3/dd56bb4c67d216046ce61d98dec0f3023043f1de48f561df1bf93dd47aea/aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683", size = 1235806 }, + { url = "https://files.pythonhosted.org/packages/a7/64/90dcd42ac21927a49ba4140b2e4d50e1847379427ef6c43eb338ef9960e3/aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef", size = 360162 }, + { url = "https://files.pythonhosted.org/packages/f3/45/145d8b4853fc92c0c8509277642767e7726a085e390ce04353dc68b0f5b5/aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088", size = 379173 }, + { url = 
"https://files.pythonhosted.org/packages/f1/90/54ccb1e4eadfb6c95deff695582453f6208584431d69bf572782e9ae542b/aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2", size = 586455 }, + { url = "https://files.pythonhosted.org/packages/c3/7a/95e88c02756e7e718f054e1bb3ec6ad5d0ee4a2ca2bb1768c5844b3de30a/aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf", size = 397255 }, + { url = "https://files.pythonhosted.org/packages/07/4f/767387b39990e1ee9aba8ce642abcc286d84d06e068dc167dab983898f18/aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e", size = 388973 }, + { url = "https://files.pythonhosted.org/packages/61/46/0df41170a4d228c07b661b1ba9d87101d99a79339dc93b8b1183d8b20545/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77", size = 1326126 }, + { url = "https://files.pythonhosted.org/packages/af/20/da0d65e07ce49d79173fed41598f487a0a722e87cfbaa8bb7e078a7c1d39/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061", size = 1364538 }, + { url = "https://files.pythonhosted.org/packages/aa/20/b59728405114e57541ba9d5b96033e69d004e811ded299537f74237629ca/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697", size = 1399896 }, + { url = "https://files.pythonhosted.org/packages/2a/92/006690c31b830acbae09d2618e41308fe4c81c0679b3b33a3af859e0b7bf/aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7", size = 1312914 }, + { url = 
"https://files.pythonhosted.org/packages/d4/71/1a253ca215b6c867adbd503f1e142117527ea8775e65962bc09b2fad1d2c/aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0", size = 1271301 }, + { url = "https://files.pythonhosted.org/packages/0a/ab/5d1d9ff9ce6cce8fa54774d0364e64a0f3cd50e512ff09082ced8e5217a1/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5", size = 1291652 }, + { url = "https://files.pythonhosted.org/packages/75/5f/f90510ea954b9ae6e7a53d2995b97a3e5c181110fdcf469bc9238445871d/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e", size = 1286289 }, + { url = "https://files.pythonhosted.org/packages/be/9e/1f523414237798660921817c82b9225a363af436458caf584d2fa6a2eb4a/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1", size = 1341848 }, + { url = "https://files.pythonhosted.org/packages/f6/36/443472ddaa85d7d80321fda541d9535b23ecefe0bf5792cc3955ea635190/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277", size = 1361619 }, + { url = "https://files.pythonhosted.org/packages/19/f6/3ecbac0bc4359c7d7ba9e85c6b10f57e20edaf1f97751ad2f892db231ad0/aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058", size = 1320869 }, + { url = "https://files.pythonhosted.org/packages/34/7e/ed74ffb36e3a0cdec1b05d8fbaa29cb532371d5a20058b3a8052fc90fe7c/aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072", size = 359271 }, + { url = 
"https://files.pythonhosted.org/packages/98/1b/718901f04bc8c886a742be9e83babb7b93facabf7c475cc95e2b3ab80b4d/aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff", size = 379143 }, + { url = "https://files.pythonhosted.org/packages/d9/1c/74f9dad4a2fc4107e73456896283d915937f48177b99867b63381fadac6e/aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487", size = 583468 }, + { url = "https://files.pythonhosted.org/packages/12/29/68d090551f2b58ce76c2b436ced8dd2dfd32115d41299bf0b0c308a5483c/aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a", size = 394066 }, + { url = "https://files.pythonhosted.org/packages/8f/f7/971f88b4cdcaaa4622925ba7d86de47b48ec02a9040a143514b382f78da4/aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d", size = 389098 }, + { url = "https://files.pythonhosted.org/packages/f1/5a/fe3742efdce551667b2ddf1158b27c5b8eb1edc13d5e14e996e52e301025/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75", size = 1332742 }, + { url = "https://files.pythonhosted.org/packages/1a/52/a25c0334a1845eb4967dff279151b67ca32a948145a5812ed660ed900868/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178", size = 1372134 }, + { url = "https://files.pythonhosted.org/packages/96/3d/33c1d8efc2d8ec36bff9a8eca2df9fdf8a45269c6e24a88e74f2aa4f16bd/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e", size = 1414413 }, + { url = 
"https://files.pythonhosted.org/packages/64/74/0f1ddaa5f0caba1d946f0dd0c31f5744116e4a029beec454ec3726d3311f/aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f", size = 1328107 }, + { url = "https://files.pythonhosted.org/packages/0a/32/c10118f0ad50e4093227234f71fd0abec6982c29367f65f32ee74ed652c4/aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73", size = 1280126 }, + { url = "https://files.pythonhosted.org/packages/c6/c9/77e3d648d97c03a42acfe843d03e97be3c5ef1b4d9de52e5bd2d28eed8e7/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf", size = 1292660 }, + { url = "https://files.pythonhosted.org/packages/7e/5d/99c71f8e5c8b64295be421b4c42d472766b263a1fe32e91b64bf77005bf2/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820", size = 1300988 }, + { url = "https://files.pythonhosted.org/packages/8f/2c/76d2377dd947f52fbe8afb19b18a3b816d66c7966755c04030f93b1f7b2d/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca", size = 1339268 }, + { url = "https://files.pythonhosted.org/packages/fd/e6/3d9d935cc705d57ed524d82ec5d6b678a53ac1552720ae41282caa273584/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91", size = 1366993 }, + { url = "https://files.pythonhosted.org/packages/fe/c2/f7eed4d602f3f224600d03ab2e1a7734999b0901b1c49b94dc5891340433/aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6", size = 1329459 }, + { url = 
"https://files.pythonhosted.org/packages/ce/8f/27f205b76531fc592abe29e1ad265a16bf934a9f609509c02d765e6a8055/aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12", size = 356968 }, + { url = "https://files.pythonhosted.org/packages/39/8c/4f6c0b2b3629f6be6c81ab84d9d577590f74f01d4412bfc4067958eaa1e1/aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc", size = 377650 }, + { url = "https://files.pythonhosted.org/packages/7b/b9/03b4327897a5b5d29338fa9b514f1c2f66a3e4fc88a4e40fad478739314d/aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092", size = 576994 }, + { url = "https://files.pythonhosted.org/packages/67/1b/20c2e159cd07b8ed6dde71c2258233902fdf415b2fe6174bd2364ba63107/aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77", size = 390684 }, + { url = "https://files.pythonhosted.org/packages/4d/6b/ff83b34f157e370431d8081c5d1741963f4fb12f9aaddb2cacbf50305225/aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385", size = 386176 }, + { url = "https://files.pythonhosted.org/packages/4d/a1/6e92817eb657de287560962df4959b7ddd22859c4b23a0309e2d3de12538/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972", size = 1303310 }, + { url = "https://files.pythonhosted.org/packages/04/29/200518dc7a39c30ae6d5bc232d7207446536e93d3d9299b8e95db6e79c54/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16", size = 1340445 }, + { url = 
"https://files.pythonhosted.org/packages/8e/20/53f7bba841ba7b5bb5dea580fea01c65524879ba39cb917d08c845524717/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6", size = 1385121 }, + { url = "https://files.pythonhosted.org/packages/f1/b4/d99354ad614c48dd38fb1ee880a1a54bd9ab2c3bcad3013048d4a1797d3a/aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa", size = 1299669 }, + { url = "https://files.pythonhosted.org/packages/51/39/ca1de675f2a5729c71c327e52ac6344e63f036bd37281686ae5c3fb13bfb/aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689", size = 1252638 }, + { url = "https://files.pythonhosted.org/packages/54/cf/a3ae7ff43138422d477348e309ef8275779701bf305ff6054831ef98b782/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57", size = 1266889 }, + { url = "https://files.pythonhosted.org/packages/6e/7a/c6027ad70d9fb23cf254a26144de2723821dade1a624446aa22cd0b6d012/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f", size = 1266249 }, + { url = "https://files.pythonhosted.org/packages/64/fd/ed136d46bc2c7e3342fed24662b4827771d55ceb5a7687847aae977bfc17/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599", size = 1311036 }, + { url = "https://files.pythonhosted.org/packages/76/9a/43eeb0166f1119256d6f43468f900db1aed7fbe32069d2a71c82f987db4d/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5", size = 1338756 }, + { url = 
"https://files.pythonhosted.org/packages/d5/bc/d01ff0810b3f5e26896f76d44225ed78b088ddd33079b85cd1a23514318b/aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987", size = 1299976 }, + { url = "https://files.pythonhosted.org/packages/3e/c9/50a297c4f7ab57a949f4add2d3eafe5f3e68bb42f739e933f8b32a092bda/aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04", size = 355609 }, + { url = "https://files.pythonhosted.org/packages/65/28/aee9d04fb0b3b1f90622c338a08e54af5198e704a910e20947c473298fd0/aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022", size = 375697 }, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/67/0952ed97a9793b4958e5736f6d2b346b414a2cd63e82d05940032f45b32f/aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", size = 19422 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17", size = 7617 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anthropic" +version = "0.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "distro", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jiter", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/e2/98ff733ff75c1d371c029fb27eb9308f9c8e694749cea70382338a8e7e88/anthropic-0.34.1.tar.gz", hash = "sha256:69e822bd7a31ec11c2edb85f2147e8f0ee0cfd3288fea70b0ca8808b2f9bf91d", size = 901462 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/1c/1ce9edec76885badebacb4e31d42acffbdfd30dbaa839d5c378d57ac9aa9/anthropic-0.34.1-py3-none-any.whl", hash = "sha256:2fa26710809d0960d970f26cd0be3686437250a481edb95c33d837aa5fa24158", size = 891537 }, +] + +[[package]] +name = "anyio" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "(python_full_version 
< '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/e3/c4c8d473d6780ef1853d630d581f70d655b4f8d7553c6997958c283039a2/anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94", size = 163930 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/a2/10639a79341f6c019dedc95bd48a4928eed9f1d1197f4c04f546fc7ae0ff/anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7", size = 86780 }, +] + +[[package]] +name = "appnope" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 }, +] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argon2-cffi-bindings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" 
}, +] +sdist = { url = "https://files.pythonhosted.org/packages/31/fa/57ec2c6d16ecd2ba0cf15f3c7d1c3c2e7b5fcb83555ff56d7ab10888ec8f/argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08", size = 42798 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/6a/e8a041599e78b6b3752da48000b14c8d1e8a04ded09c88c714ba047f34f5/argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea", size = 15124 }, +] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/e9/184b8ccce6683b0aa2fbb7ba5683ea4b9c5763f1356347f1312c32e3c66e/argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3", size = 1779911 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/13/838ce2620025e9666aa8f686431f67a29052241692a3dd1ae9d3692a89d3/argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367", size = 29658 }, + { url = "https://files.pythonhosted.org/packages/b3/02/f7f7bb6b6af6031edb11037639c697b912e1dea2db94d436e681aea2f495/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d", size = 80583 }, + { url = "https://files.pythonhosted.org/packages/ec/f7/378254e6dd7ae6f31fe40c8649eea7d4832a42243acaf0f1fff9083b2bed/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae", size = 86168 }, + { url = 
"https://files.pythonhosted.org/packages/74/f6/4a34a37a98311ed73bb80efe422fed95f2ac25a4cacc5ae1d7ae6a144505/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c", size = 82709 }, + { url = "https://files.pythonhosted.org/packages/74/2b/73d767bfdaab25484f7e7901379d5f8793cccbb86c6e0cbc4c1b96f63896/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86", size = 83613 }, + { url = "https://files.pythonhosted.org/packages/4f/fd/37f86deef67ff57c76f137a67181949c2d408077e2e3dd70c6c42912c9bf/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f", size = 84583 }, + { url = "https://files.pythonhosted.org/packages/6f/52/5a60085a3dae8fded8327a4f564223029f5f54b0cb0455a31131b5363a01/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e", size = 88475 }, + { url = "https://files.pythonhosted.org/packages/8b/95/143cd64feb24a15fa4b189a3e1e7efbaeeb00f39a51e99b26fc62fbacabd/argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082", size = 27698 }, + { url = "https://files.pythonhosted.org/packages/37/2c/e34e47c7dee97ba6f01a6203e0383e15b60fb85d78ac9a15cd066f6fe28b/argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f", size = 30817 }, + { url = "https://files.pythonhosted.org/packages/5a/e4/bf8034d25edaa495da3c8a3405627d2e35758e44ff6eaa7948092646fdcc/argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93", size = 53104 }, +] + 
+[[package]] +name = "asgiref" +version = "3.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/38/b3395cc9ad1b56d2ddac9970bc8f4141312dbaec28bc7c218b0dfafd0f42/asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590", size = 35186 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828 }, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/1d/f03bcb60c4a3212e15f99a56085d93093a497718adf828d050b9d675da81/asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0", size = 62284 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764 }, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721 }, +] + +[[package]] +name = "attrs" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 }, +] + +[[package]] +name = "authlib" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/47/df70ecd34fbf86d69833fe4e25bb9ecbaab995c8e49df726dd416f6bb822/authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917", size = 146074 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/1f/bc95e43ffb57c05b8efcc376dd55a0240bf58f47ddf5a0f92452b6457b75/Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377", size = 223827 }, +] + +[[package]] +name = "azure-ai-inference" +version = "1.0.0b3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/89/5ddefdc2ce920b68bc741e3be6c2a0ca58702eeca778546a356c3ae7fe24/azure-ai-inference-1.0.0b3.tar.gz", hash = "sha256:1e99dc74c3b335a457500311bbbadb348f54dc4c12252a93cb8ab78d6d217ff0", size = 104451 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/79/9f84eb6c03c6e18b36317b587b29b3244037ab2f65acd48666d9e62fdfdc/azure_ai_inference-1.0.0b3-py3-none-any.whl", hash = "sha256:6734ca7334c809a170beb767f1f1455724ab3f006cb60045e42a833c0e764403", size = 85951 }, +] + +[[package]] +name = "azure-common" +version = "1.1.28" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", size = 20914 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad", size = 14462 }, +] + +[[package]] +name = "azure-core" +version = "1.30.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/d4/1f469fa246f554b86fb5cebc30eef1b2a38b7af7a2c2791bce0a4c6e4604/azure-core-1.30.2.tar.gz", hash = "sha256:a14dc210efcd608821aa472d9fb8e8d035d29b68993819147bc290a8ac224472", 
size = 271104 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/d7/69d53f37733f8cb844862781767aef432ff3152bc9b9864dc98c7e286ce9/azure_core-1.30.2-py3-none-any.whl", hash = "sha256:cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a", size = 194253 }, +] + +[[package]] +name = "azure-cosmos" +version = "4.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/5a/1ae2e5e58da70ffcb2ee50fadece728b4941489cc8febbbf46d5522f6fff/azure-cosmos-4.7.0.tar.gz", hash = "sha256:72d714033134656302a2e8957c4b93590673bd288b0ca60cb123e348ae99a241", size = 381958 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/d4/38929bb3504bb9a2cb21ede7a1c652ebe7487b65da8885ea4039647bba86/azure_cosmos-4.7.0-py3-none-any.whl", hash = "sha256:03d8c7740ddc2906fb16e07b136acc0fe6a6a02656db46c5dd6f1b127b58cc96", size = 252084 }, +] + +[[package]] +name = "azure-identity" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "msal", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "msal-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/51/c9/f7e3926686a89670ce641b360bd2da9a2d7a12b3e532403462d99f81e9d5/azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea", size = 246652 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/83/a777861351e7b99e7c84ff3b36bab35e87b6e5d36e50b6905e148c696515/azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382", size = 173229 }, +] + +[[package]] +name = "azure-search-documents" +version = "11.6.0b4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-common", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/8b/1253b0694d54fe55edf8a18ee29af04247d1113b870c40ea95cf4fdb8176/azure-search-documents-11.6.0b4.tar.gz", hash = "sha256:b09fc3fa2813e83e7177874b352c84462fb86934d9f4299775361e1dfccc3f8f", size = 324608 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/9c/4f1cd68cdedc014d7d157d86671c648b2983184e098fc659c27c9a5ccd60/azure_search_documents-11.6.0b4-py3-none-any.whl", hash = "sha256:9590392464f882762ce6bad03613c822d4423f09f311c275b833de25398c00c1", size = 325310 }, +] + +[[package]] +name = "azure-storage-blob" +version = "12.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 
'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/de/9cea85c0d5fc21f99bcf9f060fc2287cb95236b70431fa63cb69890a121e/azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e", size = 564873 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/52/b578c94048469fbf9f6378e2b2a46a2d0ccba3d59a7845dbed22ebf61601/azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8", size = 404892 }, +] + +[[package]] +name = "backoff" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, +] + +[[package]] +name = "bcrypt" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/7e/d95e7d96d4828e965891af92e43b52a4cd3395dc1c1ef4ee62748d0471d0/bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221", size = 24294 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/81/4e8f5bc0cd947e91fb720e1737371922854da47a94bc9630454e7b2845f8/bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb", size = 471568 }, + { url = 
"https://files.pythonhosted.org/packages/05/d2/1be1e16aedec04bcf8d0156e01b987d16a2063d38e64c3f28030a3427d61/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00", size = 277372 }, + { url = "https://files.pythonhosted.org/packages/e3/96/7a654027638ad9b7589effb6db77eb63eba64319dfeaf9c0f4ca953e5f76/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d", size = 273488 }, + { url = "https://files.pythonhosted.org/packages/46/54/dc7b58abeb4a3d95bab653405935e27ba32f21b812d8ff38f271fb6f7f55/bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291", size = 277759 }, + { url = "https://files.pythonhosted.org/packages/ac/be/da233c5f11fce3f8adec05e8e532b299b64833cc962f49331cdd0e614fa9/bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328", size = 273796 }, + { url = "https://files.pythonhosted.org/packages/b0/b8/8b4add88d55a263cf1c6b8cf66c735280954a04223fcd2880120cc767ac3/bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7", size = 311082 }, + { url = "https://files.pythonhosted.org/packages/7b/76/2aa660679abbdc7f8ee961552e4bb6415a81b303e55e9374533f22770203/bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399", size = 305912 }, + { url = "https://files.pythonhosted.org/packages/00/03/2af7c45034aba6002d4f2b728c1a385676b4eab7d764410e34fd768009f2/bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060", size = 325185 }, + { url = 
"https://files.pythonhosted.org/packages/dc/5d/6843443ce4ab3af40bddb6c7c085ed4a8418b3396f7a17e60e6d9888416c/bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7", size = 335188 }, + { url = "https://files.pythonhosted.org/packages/cb/4c/ff8ca83d816052fba36def1d24e97d9a85739b9bbf428c0d0ecd296a07c8/bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458", size = 156481 }, + { url = "https://files.pythonhosted.org/packages/65/f1/e09626c88a56cda488810fb29d5035f1662873777ed337880856b9d204ae/bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5", size = 151336 }, + { url = "https://files.pythonhosted.org/packages/96/86/8c6a84daed4dd878fbab094400c9174c43d9b838ace077a2f8ee8bc3ae12/bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841", size = 472414 }, + { url = "https://files.pythonhosted.org/packages/f6/05/e394515f4e23c17662e5aeb4d1859b11dc651be01a3bd03c2e919a155901/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68", size = 277599 }, + { url = "https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe", size = 273491 }, + { url = "https://files.pythonhosted.org/packages/cc/14/b9ff8e0218bee95e517b70e91130effb4511e8827ac1ab00b4e30943a3f6/bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2", size = 277934 }, + { url = 
"https://files.pythonhosted.org/packages/3e/d0/31938bb697600a04864246acde4918c4190a938f891fd11883eaaf41327a/bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c", size = 273804 }, + { url = "https://files.pythonhosted.org/packages/e7/c3/dae866739989e3f04ae304e1201932571708cb292a28b2f1b93283e2dcd8/bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae", size = 311275 }, + { url = "https://files.pythonhosted.org/packages/5d/2c/019bc2c63c6125ddf0483ee7d914a405860327767d437913942b476e9c9b/bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d", size = 306355 }, + { url = "https://files.pythonhosted.org/packages/75/fe/9e137727f122bbe29771d56afbf4e0dbc85968caa8957806f86404a5bfe1/bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e", size = 325381 }, + { url = "https://files.pythonhosted.org/packages/1a/d4/586b9c18a327561ea4cd336ff4586cca1a7aa0f5ee04e23a8a8bb9ca64f1/bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8", size = 335685 }, + { url = "https://files.pythonhosted.org/packages/24/55/1a7127faf4576138bb278b91e9c75307490178979d69c8e6e273f74b974f/bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34", size = 155857 }, + { url = "https://files.pythonhosted.org/packages/1c/2a/c74052e54162ec639266d91539cca7cbf3d1d3b8b36afbfeaee0ea6a1702/bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9", size = 151717 }, + { url = 
"https://files.pythonhosted.org/packages/09/97/01026e7b1b7f8aeb41514408eca1137c0f8aef9938335e3bc713f82c282e/bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a", size = 275924 }, + { url = "https://files.pythonhosted.org/packages/ca/46/03eb26ea3e9c12ca18d1f3bf06199f7d72ce52e68f2a1ebcfd8acff9c472/bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db", size = 272242 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, +] + +[[package]] +name = "bleach" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "webencodings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/10/77f32b088738f40d4f5be801daa5f327879eadd4562f36a2b5ab975ae571/bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe", size = 202119 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ea/63/da7237f805089ecc28a3f36bca6a21c31fcbc2eb380f3b8f1be3312abd14/bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6", size = 162750 }, +] + +[[package]] +name = "build" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "(os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'linux') or (os_name == 'nt' and sys_platform == 'win32')" }, + { name = "importlib-metadata", marker = "(python_full_version < '3.10.2' and sys_platform == 'darwin') or (python_full_version < '3.10.2' and sys_platform == 'linux') or (python_full_version < '3.10.2' and sys_platform == 'win32')" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyproject-hooks", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/9e/2d725d2f7729c6e79ca62aeb926492abbc06e25910dd30139d60a68bcb19/build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d", size = 44781 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/03/f3c8ba0a6b6e30d7d18c40faab90807c9bb5e9a1e3b2fe2008af624a9c97/build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4", size = 21911 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.7.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/02/a95f2b11e207f68bc64d7aae9666fed2e2b3f307748d5123dffb72a1bbea/certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", size = 164065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/d5/c84e1a17bf61d4df64ca866a1c9a913874b4e9bdc131ec689a0ad013fb36/certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90", size = 162960 }, +] + +[[package]] +name = "cffi" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/bf/82c351342972702867359cfeba5693927efe0a8dd568165490144f554b18/cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76", size = 516073 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2a/9071bf1e20bf9f695643b6c3e0f838f340b95ee29de0d1bb7968772409be/cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb", size = 181841 }, + { url = 
"https://files.pythonhosted.org/packages/4b/42/60116f10466d692b64aef32ac40fd79b11344ab6ef889ff8e3d047f2fcb2/cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a", size = 178242 }, + { url = "https://files.pythonhosted.org/packages/26/8e/a53f844454595c6e9215e56cda123db3427f8592f2c7b5ef1be782f620d6/cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42", size = 425676 }, + { url = "https://files.pythonhosted.org/packages/60/ac/6402563fb40b64c7ccbea87836d9c9498b374629af3449f3d8ff34df187d/cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d", size = 447842 }, + { url = "https://files.pythonhosted.org/packages/b2/e7/e2ffdb8de59f48f17b196813e9c717fbed2364e39b10bdb3836504e89486/cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2", size = 455224 }, + { url = "https://files.pythonhosted.org/packages/59/55/3e8968e92fe35c1c368959a070a1276c10cae29cdad0fd0daa36c69e237e/cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab", size = 436341 }, + { url = "https://files.pythonhosted.org/packages/7f/df/700aaf009dfbfa04acb1ed487586c03c788c6a312f0361ad5f298c5f5a7d/cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b", size = 445861 }, + { url = "https://files.pythonhosted.org/packages/5a/70/637f070aae533ea11ab77708a820f3935c0edb4fbcef9393b788e6f426a5/cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206", size 
= 460982 }, + { url = "https://files.pythonhosted.org/packages/f7/1a/7d4740fa1ccc4fcc888963fc3165d69ef1a2c8d42c8911c946703ff5d4a5/cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa", size = 438434 }, + { url = "https://files.pythonhosted.org/packages/d0/d9/c48cc38aaf6f53a8b5d2dbf6fe788410fcbab33b15a69c56c01d2b08f6a2/cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f", size = 461219 }, + { url = "https://files.pythonhosted.org/packages/26/ec/b6a7f660a7f27bd2bb53fe99a2ccafa279088395ec8639b25b8950985b2d/cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc", size = 171406 }, + { url = "https://files.pythonhosted.org/packages/08/42/8c00824787e6f5ec55194f5cd30c4ba4b9d9d5bb0d4d0007b1bb948d4ad4/cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2", size = 180809 }, + { url = "https://files.pythonhosted.org/packages/53/cc/9298fb6235522e00e47d78d6aa7f395332ef4e5f6fe124f9a03aa60600f7/cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720", size = 181912 }, + { url = "https://files.pythonhosted.org/packages/e7/79/dc5334fbe60635d0846c56597a8d2af078a543ff22bc48d36551a0de62c2/cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9", size = 178297 }, + { url = "https://files.pythonhosted.org/packages/39/d7/ef1b6b16b51ccbabaced90ff0d821c6c23567fc4b2e4a445aea25d3ceb92/cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb", size = 444909 }, + { url = 
"https://files.pythonhosted.org/packages/29/b8/6e3c61885537d985c78ef7dd779b68109ba256263d74a2f615c40f44548d/cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424", size = 468854 }, + { url = "https://files.pythonhosted.org/packages/0b/49/adad1228e19b931e523c2731e6984717d5f9e33a2f9971794ab42815b29b/cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d", size = 476890 }, + { url = "https://files.pythonhosted.org/packages/76/54/c00f075c3e7fd14d9011713bcdb5b4f105ad044c5ad948db7b1a0a7e4e78/cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8", size = 459374 }, + { url = "https://files.pythonhosted.org/packages/f3/b9/f163bb3fa4fbc636ee1f2a6a4598c096cdef279823ddfaa5734e556dd206/cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6", size = 466891 }, + { url = "https://files.pythonhosted.org/packages/31/52/72bbc95f6d06ff2e88a6fa13786be4043e542cb24748e1351aba864cb0a7/cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91", size = 477658 }, + { url = "https://files.pythonhosted.org/packages/67/20/d694811457eeae0c7663fa1a7ca201ce495533b646c1180d4ac25684c69c/cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8", size = 453890 }, + { url = "https://files.pythonhosted.org/packages/dc/79/40cbf5739eb4f694833db5a27ce7f63e30a9b25b4a836c4f25fb7272aacc/cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb", size = 478254 }, + { url = 
"https://files.pythonhosted.org/packages/e9/eb/2c384c385cca5cae67ca10ac4ef685277680b8c552b99aedecf4ea23ff7e/cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9", size = 171285 }, + { url = "https://files.pythonhosted.org/packages/ca/42/74cb1e0f1b79cb64672f3cb46245b506239c1297a20c0d9c3aeb3929cb0c/cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0", size = 180842 }, + { url = "https://files.pythonhosted.org/packages/1a/1f/7862231350cc959a3138889d2c8d33da7042b22e923457dfd4cd487d772a/cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc", size = 182826 }, + { url = "https://files.pythonhosted.org/packages/8b/8c/26119bf8b79e05a1c39812064e1ee7981e1f8a5372205ba5698ea4dd958d/cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59", size = 178494 }, + { url = "https://files.pythonhosted.org/packages/61/94/4882c47d3ad396d91f0eda6ef16d45be3d752a332663b7361933039ed66a/cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb", size = 454459 }, + { url = "https://files.pythonhosted.org/packages/0f/7c/a6beb119ad515058c5ee1829742d96b25b2b9204ff920746f6e13bf574eb/cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195", size = 478502 }, + { url = "https://files.pythonhosted.org/packages/61/8a/2575cd01a90e1eca96a30aec4b1ac101a6fae06c49d490ac2704fa9bc8ba/cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e", size = 485381 }, + { url = 
"https://files.pythonhosted.org/packages/cd/66/85899f5a9f152db49646e0c77427173e1b77a1046de0191ab3b0b9a5e6e3/cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828", size = 470907 }, + { url = "https://files.pythonhosted.org/packages/00/13/150924609bf377140abe6e934ce0a57f3fc48f1fd956ec1f578ce97a4624/cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150", size = 479074 }, + { url = "https://files.pythonhosted.org/packages/17/fd/7d73d7110155c036303b0a6462c56250e9bc2f4119d7591d27417329b4d1/cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a", size = 484225 }, + { url = "https://files.pythonhosted.org/packages/fc/83/8353e5c9b01bb46332dac3dfb18e6c597a04ceb085c19c814c2f78a8c0d0/cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885", size = 488388 }, + { url = "https://files.pythonhosted.org/packages/73/0c/f9d5ca9a095b1fc88ef77d1f8b85d11151c374144e4606da33874e17b65b/cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492", size = 172096 }, + { url = "https://files.pythonhosted.org/packages/72/21/8c5d285fe20a6e31d29325f1287bb0e55f7d93630a5a44cafdafb5922495/cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2", size = 181478 }, + { url = "https://files.pythonhosted.org/packages/17/8f/581f2f3c3464d5f7cf87c2f7a5ba9acc6976253e02d73804240964243ec2/cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118", size = 182638 }, + { url = 
"https://files.pythonhosted.org/packages/8d/1c/c9afa66684b7039f48018eb11b229b659dfb32b7a16b88251bac106dd1ff/cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7", size = 178453 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/1a134d479d3a5a1ff2fabbee551d1d3f1dd70f453e081b5f70d604aae4c0/cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377", size = 454441 }, + { url = "https://files.pythonhosted.org/packages/b1/b4/e1569475d63aad8042b0935dbf62ae2a54d1e9142424e2b0e924d2d4a529/cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb", size = 478543 }, + { url = "https://files.pythonhosted.org/packages/d2/40/a9ad03fbd64309dec5bb70bc803a9a6772602de0ee164d7b9a6ca5a89249/cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555", size = 485463 }, + { url = "https://files.pythonhosted.org/packages/a6/1a/f10be60e006dd9242a24bcc2b1cd55c34c578380100f742d8c610f7a5d26/cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204", size = 470854 }, + { url = "https://files.pythonhosted.org/packages/cc/b3/c035ed21aa3d39432bd749fe331ee90e4bc83ea2dbed1f71c4bc26c41084/cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f", size = 479096 }, + { url = "https://files.pythonhosted.org/packages/00/cb/6f7edde01131de9382c89430b8e253b8c8754d66b63a62059663ceafeab2/cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0", size 
= 484013 }, + { url = "https://files.pythonhosted.org/packages/b9/83/8e4e8c211ea940210d293e951bf06b1bfb90f2eeee590e9778e99b4a8676/cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4", size = 488119 }, + { url = "https://files.pythonhosted.org/packages/5e/52/3f7cfbc4f444cb4f73ff17b28690d12436dde665f67d68f1e1687908ab6c/cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a", size = 172122 }, + { url = "https://files.pythonhosted.org/packages/94/19/cf5baa07ee0f0e55eab7382459fbddaba0fdb0ba45973dd92556ae0d02db/cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7", size = 181504 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 
199385 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/61/095a0aa1a84d1481998b534177c8566fdc50bb1233ea9a0478cd3cc075bd/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", size = 194219 }, + { url = "https://files.pythonhosted.org/packages/cc/94/f7cf5e5134175de79ad2059edf2adce18e0685ebdb9227ff0139975d0e93/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", size = 122521 }, + { url = "https://files.pythonhosted.org/packages/46/6a/d5c26c41c49b546860cc1acabdddf48b0b3fb2685f4f5617ac59261b44ae/charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", size = 120383 }, + { url = "https://files.pythonhosted.org/packages/b8/60/e2f67915a51be59d4539ed189eb0a2b0d292bf79270410746becb32bc2c3/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", size = 138223 }, + { url = "https://files.pythonhosted.org/packages/05/8c/eb854996d5fef5e4f33ad56927ad053d04dc820e4a3d39023f35cad72617/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", size = 148101 }, + { url = 
"https://files.pythonhosted.org/packages/f6/93/bb6cbeec3bf9da9b2eba458c15966658d1daa8b982c642f81c93ad9b40e1/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", size = 140699 }, + { url = "https://files.pythonhosted.org/packages/da/f1/3702ba2a7470666a62fd81c58a4c40be00670e5006a67f4d626e57f013ae/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", size = 142065 }, + { url = "https://files.pythonhosted.org/packages/3f/ba/3f5e7be00b215fa10e13d64b1f6237eb6ebea66676a41b2bcdd09fe74323/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", size = 144505 }, + { url = "https://files.pythonhosted.org/packages/33/c3/3b96a435c5109dd5b6adc8a59ba1d678b302a97938f032e3770cc84cd354/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", size = 139425 }, + { url = "https://files.pythonhosted.org/packages/43/05/3bf613e719efe68fb3a77f9c536a389f35b95d75424b96b426a47a45ef1d/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", size = 145287 }, + { url = "https://files.pythonhosted.org/packages/58/78/a0bc646900994df12e07b4ae5c713f2b3e5998f58b9d3720cce2aa45652f/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", size = 149929 }, + { url = "https://files.pythonhosted.org/packages/eb/5c/97d97248af4920bc68687d9c3b3c0f47c910e21a8ff80af4565a576bd2f0/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", size = 141605 }, + { url = "https://files.pythonhosted.org/packages/a8/31/47d018ef89f95b8aded95c589a77c072c55e94b50a41aa99c0a2008a45a4/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", size = 142646 }, + { url = "https://files.pythonhosted.org/packages/ae/d5/4fecf1d58bedb1340a50f165ba1c7ddc0400252d6832ff619c4568b36cc0/charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", size = 92846 }, + { url = "https://files.pythonhosted.org/packages/a2/a0/4af29e22cb5942488cf45630cbdd7cefd908768e69bdd90280842e4e8529/charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", size = 100343 }, + { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647 }, + { url = "https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434 }, + { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979 }, + { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582 }, + { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645 }, + { url = "https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398 }, + { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273 }, + { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577 }, + { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747 }, + { url = "https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375 }, + { url = 
"https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232 }, + { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859 }, + { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509 }, + { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870 }, + { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892 }, + { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213 }, + { url = 
"https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404 }, + { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275 }, + { url = "https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518 }, + { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182 }, + { url = "https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869 }, + { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042 }, + { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", 
hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275 }, + { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 144819 }, + { url = "https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415 }, + { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212 }, + { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167 }, + { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041 }, + { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397 }, + { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", 
size = 48543 }, +] + +[[package]] +name = "cheap-repr" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/30/f0e9d5bfe80b8287ea8a9263eb3c71c5fdf44b6f7a781a7c96f83172ccad/cheap_repr-0.5.2.tar.gz", hash = "sha256:001a5cf8adb0305c7ad3152c5f776040ac2a559d97f85770cebcb28c6ca5a30f", size = 20232 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/52/fec0262af470a157a557e46be1d52ecdaf1695cefd80bb62bb6a07cc4ea9/cheap_repr-0.5.2-py2.py3-none-any.whl", hash = "sha256:537ec1991bfee885c13c6d473afd110a408e039cde26882e95bf92761556ab6e", size = 12228 }, +] + +[[package]] +name = "chroma-hnswlib" +version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/74/b9dde05ea8685d2f8c4681b517e61c7887e974f6272bb24ebc8f2105875b/chroma_hnswlib-0.7.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f35192fbbeadc8c0633f0a69c3d3e9f1a4eab3a46b65458bbcbcabdd9e895c36", size = 195821 }, + { url = "https://files.pythonhosted.org/packages/fd/58/101bfa6bc41bc6cc55fbb5103c75462a7bf882e1704256eb4934df85b6a8/chroma_hnswlib-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f007b608c96362b8f0c8b6b2ac94f67f83fcbabd857c378ae82007ec92f4d82", size = 183854 }, + { url = "https://files.pythonhosted.org/packages/17/ff/95d49bb5ce134f10d6aa08d5f3bec624eaff945f0b17d8c3fce888b9a54a/chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:456fd88fa0d14e6b385358515aef69fc89b3c2191706fd9aee62087b62aad09c", size = 2358774 
}, + { url = "https://files.pythonhosted.org/packages/3a/6d/27826180a54df80dbba8a4f338b022ba21c0c8af96fd08ff8510626dee8f/chroma_hnswlib-0.7.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dfaae825499c2beaa3b75a12d7ec713b64226df72a5c4097203e3ed532680da", size = 2392739 }, + { url = "https://files.pythonhosted.org/packages/d6/63/ee3e8b7a8f931918755faacf783093b61f32f59042769d9db615999c3de0/chroma_hnswlib-0.7.6-cp310-cp310-win_amd64.whl", hash = "sha256:2487201982241fb1581be26524145092c95902cb09fc2646ccfbc407de3328ec", size = 150955 }, + { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911 }, + { url = "https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000 }, + { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289 }, + { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755 }, + { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888 }, + { url = 
"https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804 }, + { url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421 }, + { url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672 }, + { url = "https://files.pythonhosted.org/packages/74/1e/80a033ea4466338824974a34f418e7b034a7748bf906f56466f5caa434b0/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5", size = 2436986 }, +] + +[[package]] +name = "chromadb" +version = "0.5.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bcrypt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "build", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "chroma-hnswlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "fastapi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "importlib-resources", marker = 
"sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "kubernetes", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "mmh3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "onnxruntime", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-exporter-otlp-proto-grpc", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-instrumentation-fastapi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-sdk", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "orjson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "overrides", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "posthog", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pypika", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "uvicorn", extra = ["standard"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/31/7659067b51ac8b2ec355a100a77fb4d6d823aeb3ff111b6de87dfd18ace1/chromadb-0.5.5.tar.gz", hash = "sha256:84f4bfee320fb4912cbeb4d738f01690891e9894f0ba81f39ee02867102a1c4d", size = 31282293 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/4c/ee62b19a8daeed51e3c88c84b7da6047a74b786e598be3592b67a286d419/chromadb-0.5.5-py3-none-any.whl", hash = "sha256:2a5a4b84cb0fc32b380e193be68cdbadf3d9f77dbbf141649be9886e42910ddd", size = 584312 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or (platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanfriendly", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, +] + +[[package]] +name = "comm" +version = "0.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 
7180 }, +] + +[[package]] +name = "coverage" +version = "7.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690 }, + { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127 }, + { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654 }, + { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598 }, + { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732 }, + { url = 
"https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816 }, + { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325 }, + { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418 }, + { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343 }, + { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136 }, + { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, + { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, + { url = 
"https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, + { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 
209348 }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, + { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, + { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, + { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, + { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, + { url = 
"https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, + { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038 }, + { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, + { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, + { url = 
"https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, + { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307 }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, + { url = "https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "(python_full_version <= '3.11' and sys_platform == 'darwin') or (python_full_version <= '3.11' and sys_platform == 'linux') or (python_full_version <= '3.11' and sys_platform == 'win32')" }, +] + +[[package]] +name = "cryptography" +version = "43.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "(platform_python_implementation != 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'PyPy' and sys_platform == 'linux') or (platform_python_implementation != 'PyPy' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/ec/9fb9dcf4f91f0e5e76de597256c43eedefd8423aa59be95c70c4c3db426a/cryptography-43.0.0.tar.gz", hash = 
"sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e", size = 686873 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/46/dcd2eb6840b9452e7fbc52720f3dc54a85eb41e68414733379e8f98e3275/cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74", size = 6239718 }, + { url = "https://files.pythonhosted.org/packages/e8/23/b0713319edff1d8633775b354f8b34a476e4dd5f4cd4b91e488baec3361a/cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895", size = 3808466 }, + { url = "https://files.pythonhosted.org/packages/77/9d/0b98c73cebfd41e4fb0439fe9ce08022e8d059f51caa7afc8934fc1edcd9/cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22", size = 3998060 }, + { url = "https://files.pythonhosted.org/packages/ae/71/e073795d0d1624847f323481f7d84855f699172a632aa37646464b0e1712/cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47", size = 3792596 }, + { url = "https://files.pythonhosted.org/packages/83/25/439a8ddd8058e7f898b7d27c36f94b66c8c8a2d60e1855d725845f4be0bc/cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf", size = 4008355 }, + { url = "https://files.pythonhosted.org/packages/c7/a2/1607f1295eb2c30fcf2c07d7fd0c3772d21dcdb827de2b2730b02df0af51/cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55", size = 3899133 }, + { url = "https://files.pythonhosted.org/packages/5e/64/f41f42ddc9c583737c9df0093affb92c61de7d5b0d299bf644524afe31c1/cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431", size = 4096946 }, + { url = "https://files.pythonhosted.org/packages/cd/cd/d165adcf3e707d6a049d44ade6ca89973549bed0ab3686fa49efdeefea53/cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc", size = 2616826 }, + { url = "https://files.pythonhosted.org/packages/f9/b7/38924229e84c41b0e88d7a5eed8a29d05a44364f85fbb9ddb3984b746fd2/cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778", size = 3078700 }, + { url = "https://files.pythonhosted.org/packages/66/d7/397515233e6a861f921bd0365b162b38e0cc513fcf4f1bdd9cc7bc5a3384/cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66", size = 6242814 }, + { url = "https://files.pythonhosted.org/packages/58/aa/99b2c00a4f54c60d210d6d1759c720ecf28305aa32d6fb1bb1853f415be6/cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5", size = 3809467 }, + { url = "https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e", size = 3998617 }, + { url = "https://files.pythonhosted.org/packages/a3/62/62770f34290ebb1b6542bd3f13b3b102875b90aed4804e296f8d2a5ac6d7/cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5", size = 3794003 }, + { url = "https://files.pythonhosted.org/packages/0f/6c/b42660b3075ff543065b2c1c5a3d9bedaadcff8ebce2ee981be2babc2934/cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f", size = 4008774 }, + { url = "https://files.pythonhosted.org/packages/f7/74/028cea86db9315ba3f991e307adabf9f0aa15067011137c38b2fb2aa16eb/cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0", size = 3900098 }, + { url = "https://files.pythonhosted.org/packages/bd/f6/e4387edb55563e2546028ba4c634522fe727693d3cdd9ec0ecacedc75411/cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b", size = 4096867 }, + { url = "https://files.pythonhosted.org/packages/ce/61/55560405e75432bdd9f6cf72fa516cab623b83a3f6d230791bc8fc4afeee/cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf", size = 2616481 }, + { url = "https://files.pythonhosted.org/packages/e6/3d/696e7a0f04555c58a2813d47aaa78cb5ba863c1f453c74a4f45ae772b054/cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709", size = 3081462 }, + { url = "https://files.pythonhosted.org/packages/c6/3a/9c7d864bbcca2df77a601366a6ae3937cd78d0f21ad98441f3424592aea7/cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70", size = 3156882 }, + { url = "https://files.pythonhosted.org/packages/17/cd/d43859b09d726a905d882b6e464ccf02aa2dca2c3e76c44a0c5b169f0144/cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66", size = 3722095 }, + { url = "https://files.pythonhosted.org/packages/2e/ce/c7b912d95f0ded80ad3b50a0a6b31de813c25d9ffadbe1b26bf22d2c4518/cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f", size = 3928750 }, + { url = "https://files.pythonhosted.org/packages/ca/25/7b53082e4c373127c1fb190f70c5aca7bf7a03ac11f67ba15473bc6d9a0e/cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f", size = 3002487 }, +] + +[[package]] +name = "debugpy" +version = "1.8.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/f9/61c325a10ded8dc3ddc3e7cd2ed58c0b15b2ef4bf8b4bf2930ee98ed59ee/debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0", size = 4612118 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/36/0b423f94097cc86555f9a2c8717511863b2a680c9b44b5419d8ac1ff7bf2/debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7", size = 1711184 }, + { url = "https://files.pythonhosted.org/packages/57/0c/c2ec581541923a4d36cee4fd2419c1211c986849fc61097f87aa81fc6ad3/debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a", size = 2997629 }, + { url = "https://files.pythonhosted.org/packages/a8/46/3072c2cd3b20f435968275d316f6aea7ddbb760386324e6578278bc2eb99/debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed", size = 4764678 }, + { url = "https://files.pythonhosted.org/packages/38/25/e738d6f782beba924c0e10dfde2061152f1ea3608dff0e5a5bfb30c311e9/debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e", size = 4788002 }, + { url = "https://files.pythonhosted.org/packages/ad/72/fd138a10dda16775607316d60dd440fcd23e7560e9276da53c597b5917e9/debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = 
"sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a", size = 1786504 }, + { url = "https://files.pythonhosted.org/packages/e2/0e/d0e6af2d7bbf5ace847e4d3bd41f8f9d4a0764fcd8058f07a1c51618cbf2/debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b", size = 2642077 }, + { url = "https://files.pythonhosted.org/packages/f6/55/2a1dc192894ba9b368cdcce15315761a00f2d4cd7de4402179648840e480/debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408", size = 4702081 }, + { url = "https://files.pythonhosted.org/packages/7f/7f/942b23d64f4896e9f8776cf306dfd00feadc950a38d56398610a079b28b1/debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3", size = 4715571 }, + { url = "https://files.pythonhosted.org/packages/9a/82/7d9e1f75fb23c876ab379008c7cf484a1cfa5ed47ccaac8ba37c75e6814e/debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156", size = 1436398 }, + { url = "https://files.pythonhosted.org/packages/fd/b6/ee71d5e73712daf8307a9e85f5e39301abc8b66d13acd04dfff1702e672e/debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb", size = 1437465 }, + { url = "https://files.pythonhosted.org/packages/6c/d8/8e32bf1f2e0142f7e8a2c354338b493e87f2c44e77e233b3a140fb5efa03/debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7", size = 4581313 }, + { url = "https://files.pythonhosted.org/packages/f7/be/2fbaffecb063de228b2b3b6a1750b0b745e5dc645eddd52be8b329933c0b/debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c", size = 4581209 }, + { url = "https://files.pythonhosted.org/packages/02/49/b595c34d7bc690e8d225a6641618a5c111c7e13db5d9e2b756c15ce8f8c6/debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44", size = 4824118 }, +] + +[[package]] +name = "decorator" +version = "5.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/14/1e41f504a246fc224d2ac264c227975427a85caf37c3979979edb9b1b232/Deprecated-1.2.14.tar.gz", hash = 
"sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3", size = 2974416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", size = 9561 }, +] + +[[package]] +name = "distlib" +version = "0.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/91/e2df406fb4efacdf46871c25cde65d3c6ee5e173b7e5a4547a47bae91920/distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64", size = 609931 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/41/9307e4f5f9976bc8b7fea0b66367734e8faf3ec84bc0d412d8cfabbb66cd/distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", size = 468850 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "dnspython" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/7d/c871f55054e403fdfd6b8f65fd6d1c4e147ed100d3e9f9ba1fe695403939/dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc", size = 332727 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/87/a1/8c5287991ddb8d3e4662f71356d9656d91ab3a36618c3dd11b280df0d255/dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50", size = 307696 }, +] + +[[package]] +name = "docstring-parser" +version = "0.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533 }, +] + +[[package]] +name = "environs" +version = "9.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d4/e3/c3c6c76f3dbe3e019e9a451b35bf9f44690026a5bb1232f7b77097b72ff5/environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9", size = 20795 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/5e/f0f217dc393372681bfe05c50f06a212e78d0a3fee907a74ab451ec1dcdb/environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124", size = 12548 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612 }, +] + +[[package]] +name = "executing" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/08/41/85d2d28466fca93737592b7f3cc456d1cfd6bcd401beceeba17e8e792b50/executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147", size = 836501 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/03/6ea8b1b2a5ab40a7a60dc464d3daa7aa546e0a74d74a9f8ff551ea7905db/executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc", size = 24922 }, +] + +[[package]] +name = "fastapi" +version = "0.112.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"starlette", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/09/71a961740a1121d7cc90c99036cc3fbb507bf0c69860d08d4388f842196b/fastapi-0.112.1.tar.gz", hash = "sha256:b2537146f8c23389a7faa8b03d0bd38d4986e6983874557d95eed2acc46448ef", size = 291025 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/b0/0981f9eb5884245ed6678af234f2cbcd40f44570718caddc0360bdb4015d/fastapi-0.112.1-py3-none-any.whl", hash = "sha256:bcbd45817fc2a1cd5da09af66815b84ec0d3d634eb173d1ab468ae3103e183e4", size = 93163 }, +] + +[[package]] +name = "fastjsonschema" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/3f/3ad5e7be13b4b8b55f4477141885ab2364f65d5f6ad5f7a9daffd634d066/fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23", size = 373056 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/ca/086311cdfc017ec964b2436fe0c98c1f4efcb7e4c328956a22456e497655/fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a", size = 23543 }, +] + +[[package]] +name = "filelock" +version = "3.15.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/08/dd/49e06f09b6645156550fb9aee9cc1e59aba7efbc972d665a1bd6ae0435d4/filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb", size = 18007 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7", size = 16159 }, 
+] + +[[package]] +name = "flatbuffers" +version = "24.3.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/74/2df95ef84b214d2bee0886d572775a6f38793f5ca6d7630c3239c91104ac/flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4", size = 22139 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/f0/7e988a019bc54b2dbd0ad4182ef2d53488bb02e58694cd79d61369e85900/flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812", size = 26784 }, +] + +[[package]] +name = "frozenlist" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/3d/2102257e7acad73efc4a0c306ad3953f68c504c16982bbdfee3ad75d8085/frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b", size = 37820 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/35/1328c7b0f780d34f8afc1d87ebdc2bb065a123b24766a0b475f0d67da637/frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac", size = 94315 }, + { url = "https://files.pythonhosted.org/packages/f4/d6/ca016b0adcf8327714ccef969740688808c86e0287bf3a639ff582f24e82/frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868", size = 53805 }, + { url = "https://files.pythonhosted.org/packages/ae/83/bcdaa437a9bd693ba658a0310f8cdccff26bd78e45fccf8e49897904a5cd/frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776", size = 52163 }, + { url = 
"https://files.pythonhosted.org/packages/d4/e9/759043ab7d169b74fe05ebfbfa9ee5c881c303ebc838e308346204309cd0/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a", size = 238595 }, + { url = "https://files.pythonhosted.org/packages/f8/ce/b9de7dc61e753dc318cf0de862181b484178210c5361eae6eaf06792264d/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad", size = 262428 }, + { url = "https://files.pythonhosted.org/packages/36/ce/dc6f29e0352fa34ebe45421960c8e7352ca63b31630a576e8ffb381e9c08/frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c", size = 258867 }, + { url = "https://files.pythonhosted.org/packages/51/47/159ac53faf8a11ae5ee8bb9db10327575557504e549cfd76f447b969aa91/frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe", size = 229412 }, + { url = "https://files.pythonhosted.org/packages/ec/25/0c87df2e53c0c5d90f7517ca0ff7aca78d050a8ec4d32c4278e8c0e52e51/frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a", size = 239539 }, + { url = "https://files.pythonhosted.org/packages/97/94/a1305fa4716726ae0abf3b1069c2d922fcfd442538cb850f1be543f58766/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98", size = 253379 }, + { url = "https://files.pythonhosted.org/packages/53/82/274e19f122e124aee6d113188615f63b0736b4242a875f482a81f91e07e2/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75", size = 245901 }, + { url = "https://files.pythonhosted.org/packages/b8/28/899931015b8cffbe155392fe9ca663f981a17e1adc69589ee0e1e7cdc9a2/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5", size = 263797 }, + { url = "https://files.pythonhosted.org/packages/6e/4f/b8a5a2f10c4a58c52a52a40cf6cf1ffcdbf3a3b64f276f41dab989bf3ab5/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950", size = 264415 }, + { url = "https://files.pythonhosted.org/packages/b0/2c/7be3bdc59dbae444864dbd9cde82790314390ec54636baf6b9ce212627ad/frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc", size = 253964 }, + { url = "https://files.pythonhosted.org/packages/2e/ec/4fb5a88f6b9a352aed45ab824dd7ce4801b7bcd379adcb927c17a8f0a1a8/frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1", size = 44559 }, + { url = "https://files.pythonhosted.org/packages/61/15/2b5d644d81282f00b61e54f7b00a96f9c40224107282efe4cd9d2bf1433a/frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439", size = 50434 }, + { url = "https://files.pythonhosted.org/packages/01/bc/8d33f2d84b9368da83e69e42720cff01c5e199b5a868ba4486189a4d8fa9/frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0", size = 97060 }, + { url = "https://files.pythonhosted.org/packages/af/b2/904500d6a162b98a70e510e743e7ea992241b4f9add2c8063bf666ca21df/frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49", size = 55347 }, + { url = 
"https://files.pythonhosted.org/packages/5b/9c/f12b69997d3891ddc0d7895999a00b0c6a67f66f79498c0e30f27876435d/frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced", size = 53374 }, + { url = "https://files.pythonhosted.org/packages/ac/6e/e0322317b7c600ba21dec224498c0c5959b2bce3865277a7c0badae340a9/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0", size = 273288 }, + { url = "https://files.pythonhosted.org/packages/a7/76/180ee1b021568dad5b35b7678616c24519af130ed3fa1e0f1ed4014e0f93/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106", size = 284737 }, + { url = "https://files.pythonhosted.org/packages/05/08/40159d706a6ed983c8aca51922a93fc69f3c27909e82c537dd4054032674/frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068", size = 280267 }, + { url = "https://files.pythonhosted.org/packages/e0/18/9f09f84934c2b2aa37d539a322267939770362d5495f37783440ca9c1b74/frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2", size = 258778 }, + { url = "https://files.pythonhosted.org/packages/b3/c9/0bc5ee7e1f5cc7358ab67da0b7dfe60fbd05c254cea5c6108e7d1ae28c63/frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19", size = 272276 }, + { url = "https://files.pythonhosted.org/packages/12/5d/147556b73a53ad4df6da8bbb50715a66ac75c491fdedac3eca8b0b915345/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82", size = 272424 }, + { url = "https://files.pythonhosted.org/packages/83/61/2087bbf24070b66090c0af922685f1d0596c24bb3f3b5223625bdeaf03ca/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec", size = 260881 }, + { url = "https://files.pythonhosted.org/packages/a8/be/a235bc937dd803258a370fe21b5aa2dd3e7bfe0287a186a4bec30c6cccd6/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a", size = 282327 }, + { url = "https://files.pythonhosted.org/packages/5d/e7/b2469e71f082948066b9382c7b908c22552cc705b960363c390d2e23f587/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74", size = 281502 }, + { url = "https://files.pythonhosted.org/packages/db/1b/6a5b970e55dffc1a7d0bb54f57b184b2a2a2ad0b7bca16a97ca26d73c5b5/frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2", size = 272292 }, + { url = "https://files.pythonhosted.org/packages/1a/05/ebad68130e6b6eb9b287dacad08ea357c33849c74550c015b355b75cc714/frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17", size = 44446 }, + { url = "https://files.pythonhosted.org/packages/b3/21/c5aaffac47fd305d69df46cfbf118768cdf049a92ee6b0b5cb029d449dcf/frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825", size = 50459 }, + { url = "https://files.pythonhosted.org/packages/b4/db/4cf37556a735bcdb2582f2c3fa286aefde2322f92d3141e087b8aeb27177/frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae", size = 93937 }, + { url = 
"https://files.pythonhosted.org/packages/46/03/69eb64642ca8c05f30aa5931d6c55e50b43d0cd13256fdd01510a1f85221/frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb", size = 53656 }, + { url = "https://files.pythonhosted.org/packages/3f/ab/c543c13824a615955f57e082c8a5ee122d2d5368e80084f2834e6f4feced/frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b", size = 51868 }, + { url = "https://files.pythonhosted.org/packages/a9/b8/438cfd92be2a124da8259b13409224d9b19ef8f5a5b2507174fc7e7ea18f/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86", size = 280652 }, + { url = "https://files.pythonhosted.org/packages/54/72/716a955521b97a25d48315c6c3653f981041ce7a17ff79f701298195bca3/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480", size = 286739 }, + { url = "https://files.pythonhosted.org/packages/65/d8/934c08103637567084568e4d5b4219c1016c60b4d29353b1a5b3587827d6/frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09", size = 289447 }, + { url = "https://files.pythonhosted.org/packages/70/bb/d3b98d83ec6ef88f9bd63d77104a305d68a146fd63a683569ea44c3085f6/frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a", size = 265466 }, + { url = "https://files.pythonhosted.org/packages/0b/f2/b8158a0f06faefec33f4dff6345a575c18095a44e52d4f10c678c137d0e0/frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd", size = 281530 }, + { url = "https://files.pythonhosted.org/packages/ea/a2/20882c251e61be653764038ece62029bfb34bd5b842724fff32a5b7a2894/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6", size = 281295 }, + { url = "https://files.pythonhosted.org/packages/4c/f9/8894c05dc927af2a09663bdf31914d4fb5501653f240a5bbaf1e88cab1d3/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1", size = 268054 }, + { url = "https://files.pythonhosted.org/packages/37/ff/a613e58452b60166507d731812f3be253eb1229808e59980f0405d1eafbf/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b", size = 286904 }, + { url = "https://files.pythonhosted.org/packages/cc/6e/0091d785187f4c2020d5245796d04213f2261ad097e0c1cf35c44317d517/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e", size = 290754 }, + { url = "https://files.pythonhosted.org/packages/a5/c2/e42ad54bae8bcffee22d1e12a8ee6c7717f7d5b5019261a8c861854f4776/frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8", size = 282602 }, + { url = "https://files.pythonhosted.org/packages/b6/61/56bad8cb94f0357c4bc134acc30822e90e203b5cb8ff82179947de90c17f/frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89", size = 44063 }, + { url = "https://files.pythonhosted.org/packages/3e/dc/96647994a013bc72f3d453abab18340b7f5e222b7b7291e3697ca1fcfbd5/frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5", size = 50452 }, + { url = 
"https://files.pythonhosted.org/packages/83/10/466fe96dae1bff622021ee687f68e5524d6392b0a2f80d05001cd3a451ba/frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7", size = 11552 }, +] + +[[package]] +name = "fsspec" +version = "2024.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/b6/eba5024a9889fcfff396db543a34bef0ab9d002278f163129f9f01005960/fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49", size = 284584 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e", size = 177561 }, +] + +[[package]] +name = "google-ai-generativelanguage" +version = "0.6.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/38/3d717e70a0020cde7bef8ec998ef3c605f208cc77ba93d22450e09f4d4ee/google-ai-generativelanguage-0.6.6.tar.gz", hash = "sha256:1739f035caeeeca5c28f887405eec8690f3372daf79fecf26454a97a4f1733a8", size = 758303 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/64/bac34c331e8103a0c32df8298823520787e6ff32ea736785c46b1322d62e/google_ai_generativelanguage-0.6.6-py3-none-any.whl", hash = 
"sha256:59297737931f073d55ce1268dcc6d95111ee62850349d2b6cde942b16a4fca5c", size = 718256 }, +] + +[[package]] +name = "google-api-core" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "googleapis-common-protos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/41/42a127bf163d9bf1f21540a3bf41c69b231b88707d8d753680b8878201a6/google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd", size = 148925 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/99/daa3541e8ecd7d8b7907b714ba92126097a976b5b3dbabdb5febdcf08554/google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125", size = 139384 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio-status", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "google-api-python-client" +version = "2.142.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "google-auth-httplib2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httplib2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "uritemplate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fe/d2/1dc1b95e9fef7bec1df1e04941d9556b6e384691d2ba520777c68429230f/google_api_python_client-2.142.0.tar.gz", hash = "sha256:a1101ac9e24356557ca22f07ff48b7f61fa5d4b4e7feeef3bda16e5dcb86350e", size = 11680160 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/41/957e29b392728ba94d1df652e2f3ce59022a6d7bb0164575c016ad204a52/google_api_python_client-2.142.0-py2.py3-none-any.whl", hash = "sha256:266799082bb8301f423ec204dffbffb470b502abbf29efd1f83e644d36eb5a8f", size = 12186205 }, +] + +[[package]] +name = "google-auth" +version = "2.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyasn1-modules", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rsa", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/ae/634dafb151366d91eb848a25846a780dbce4326906ef005d199723fbbca0/google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc", size = 257875 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/fb/9af9e3f2996677bdda72734482934fe85a3abde174e5f0783ac2f817ba98/google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65", size = 200870 }, +] + +[[package]] +name = 
"google-auth-httplib2" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httplib2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253 }, +] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.63.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docstring-parser", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-cloud-bigquery", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-cloud-resource-manager", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-cloud-storage", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "proto-plus", marker = "sys_platform == 'darwin' or sys_platform == 
'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "shapely", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/f8/f3dd468562b2ec9bee62bb52f79dbf32a754f807a29d1e82e2d9eabea314/google-cloud-aiplatform-1.63.0.tar.gz", hash = "sha256:4eb2398bed02a60ad23656b4a442b5d6efa181d11653f8c31f0a5f642c09f913", size = 6250057 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/ac/29c1caeb449a1957b6a3fb09c5e58d09dabf3c9b7d0644f74c16ae286dad/google_cloud_aiplatform-1.63.0-py2.py3-none-any.whl", hash = "sha256:857abe09d1f3f49f62000dbd2302bc653c9a4cdce67ccf65bfd5878fcc81760d", size = 5224293 }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-cloud-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-resumable-media", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/39/07/d6f8c55f68d796a6a045cbb3c1783ed1c77ec641acbf9e6ff78b38b127a4/google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509", size = 455186 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/98/2f931388614ea894640f84c1874d72d84d890c093e334a3990e363ff689e/google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9", size = 239012 }, +] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b8/1f/9d1e0ba6919668608570418a9a51e47070ac15aeff64261fb092d8be94c0/google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073", size = 35587 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/0f/2e2061e3fbcb9d535d5da3f58cc8de4947df1786fe6a1355960feb05a681/google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61", size = 29233 }, +] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpc-google-iam-v1", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "proto-plus", marker 
= "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/32/14d345dee1f290a26bd639da8edbca30958865b7cc7207961e10d2f32282/google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891", size = 394678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/ab/63ab13fb060714b9d1708ca32e0ee41f9ffe42a62e524e7429cde45cfe61/google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175", size = 341861 }, +] + +[[package]] +name = "google-cloud-storage" +version = "2.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-cloud-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-crc32c", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-resumable-media", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/b7/1554cdeb55d9626a4b8720746cba8119af35527b12e1780164f9ba0f659a/google_cloud_storage-2.18.2.tar.gz", hash = "sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99", size = 5532864 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fc/da/95db7bd4f0bd1644378ac1702c565c0210b004754d925a74f526a710c087/google_cloud_storage-2.18.2-py2.py3-none-any.whl", hash = "sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166", size = 130466 }, +] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/a5/4bb58448fffd36ede39684044df93a396c13d1ea3516f585767f9f960352/google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", size = 12689 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/9a/a9bc2603a17d4fda1827d7ab0bb18d1eb5b9df80b9e11955ed9f727ace09/google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", size = 32090 }, + { url = "https://files.pythonhosted.org/packages/f8/b3/59b49d9c5f15172a35f5560b67048eae02a54927e60c370f3b91743b79f6/google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", size = 30073 }, + { url = "https://files.pythonhosted.org/packages/34/c6/27be6fc6cbfebff08f63c2017fe885932b3387b45a0013b772f9beac7c01/google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", size = 32681 }, + { url = "https://files.pythonhosted.org/packages/b7/53/0170614ccaf34ac602c877929998dbca4923f0c401f0bea6f0d5a38a3e57/google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", size = 30022 }, + { url = "https://files.pythonhosted.org/packages/a9/d0/04f2846f0af1c683eb3b664c9de9543da1e66a791397456a65073b6054a2/google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", size = 32288 }, + { url = "https://files.pythonhosted.org/packages/f9/c2/eb43b40e799a9f85a43b358f2b4a2b4d60f8c22a7867aca5d6eb1b88b565/google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", size = 569180 }, + { url = "https://files.pythonhosted.org/packages/b9/14/e9ba87ccc931323d79574924bf582633cc467e196bb63a49bc5a75c1dd58/google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", size = 35236 }, + { url = "https://files.pythonhosted.org/packages/3f/a7/d9709429d1eae1c4907b3b9aab866de26acc5ca42c4237d216acf0b7033a/google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", size = 581671 }, + { url = "https://files.pythonhosted.org/packages/5a/6b/882314bb535e44bb5578d60859497c5b9d82103960f3b6ecdaf42d3fab34/google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", size = 23927 }, + { url = "https://files.pythonhosted.org/packages/1f/6b/fcd4744a020fa7bfb1a451b0be22b3e5a4cb28bafaaf01467d2e9402b96b/google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", size = 27318 }, + { url = "https://files.pythonhosted.org/packages/69/0f/7f89ae2b22c55273110a44a7ed55a2948bc213fb58983093fbefcdfd2d13/google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", size = 32093 }, + { url = "https://files.pythonhosted.org/packages/41/3f/8141b03ad127fc569c3efda2bfe31d64665e02e2b8b7fbf7b25ea914c27a/google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", size = 30071 }, + { url 
= "https://files.pythonhosted.org/packages/fc/76/3ef124b893aa280e45e95d2346160f1d1d5c0ffc89d3f6e446c83116fb91/google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", size = 32702 }, + { url = "https://files.pythonhosted.org/packages/fd/71/299a368347aeab3c89896cdfb67703161becbf5afbc1748a1850094828dc/google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", size = 30041 }, + { url = "https://files.pythonhosted.org/packages/72/92/2a2fa23db7d0b0382accbdf09768c28f7c07fc8c354cdcf2f44a47f4314e/google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", size = 32317 }, + { url = "https://files.pythonhosted.org/packages/0f/99/e7e288f1b50baf4964ff39fa79d9259d004ae44db35c8280ff4ffea362d5/google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", size = 570024 }, + { url = "https://files.pythonhosted.org/packages/88/ea/e53fbafcd0be2349d9c2a6912646cdfc47cfc5c22be9a8a5156552e33821/google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", size = 36047 }, + { url = "https://files.pythonhosted.org/packages/02/94/d2ea867760d5a27b3e9eb40ff31faf7f03f949e51d4e3b3ae24f759b5963/google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", size = 582541 }, + { url = "https://files.pythonhosted.org/packages/b7/09/768d2ca0c10a0765f83c6d06a5e40f3083cb75b8e7718ac22edff997aefc/google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", size = 23928 }, + { url = 
"https://files.pythonhosted.org/packages/ce/8b/02bf4765c487901c8660290ade9929d65a6151c367ba32e75d136ef2d0eb/google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", size = 27318 }, +] + +[[package]] +name = "google-generativeai" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-ai-generativelanguage", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-api-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-api-python-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/92/766c19a6ccdc3e5272ecb831e131672e290d1ca4ec5cd6a4040a78454707/google_generativeai-0.7.2-py3-none-any.whl", hash = "sha256:3117d1ebc92ee77710d4bc25ab4763492fddce9b6332eb25d124cf5d8b78b339", size = 164212 }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251 }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/1a/41723ae380fa9c561cbe7b61c4eef9091d5fe95486465ccfc84845877331/googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87", size = 112890 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/48/87422ff1bddcae677fb6f58c97f5cfc613304a5e8ce2c3662760199c0a84/googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945", size = 220001 }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", extra = ["grpc"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 
'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/41/f01bf46bac4034b4750575fe87c80c5a43a8912847307955e22f2125b60c/grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001", size = 17664 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/7d/da3875b7728bc700eeb28b513754ce237c04ac7cbf8559d76b0464ee01cb/grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e", size = 24866 }, +] + +[[package]] +name = "grpcio" +version = "1.60.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/38/c615b5c2be690fb31871f294cc08a96e598b085b8d07c5967a5018e0b90c/grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96", size = 24766390 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/88/f5a1e1441180a57a409ccb26a7db20ec5686973698f8b6119412dedb7368/grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139", size = 99933161 }, + { url = "https://files.pythonhosted.org/packages/e6/54/58c17c86f3410fdfc843dddbbafa2d71a61f96b7a3832b6ad299d4359833/grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff", size = 9627835 }, + { url = "https://files.pythonhosted.org/packages/d8/86/b082d195d1c0ac885a9bec7ced2e6811856bef745efef4c604fe97e72614/grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491", size = 5115762 }, + { url = "https://files.pythonhosted.org/packages/37/11/a360319387e90b911dc0458eacbd90c615660e4ed415cb0a81eb18685c10/grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43", 
size = 5621840 }, + { url = "https://files.pythonhosted.org/packages/ed/bd/4dbe2ae13ffba7eef2a3bd2dcebbc2255da18d1a972a89952d55e8ad3d4b/grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae", size = 5356182 }, + { url = "https://files.pythonhosted.org/packages/09/43/98b53f2fccc2389adfc60720a514d029a728d028641a4289788aa22c3981/grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508", size = 5904203 }, + { url = "https://files.pythonhosted.org/packages/73/3c/d7bd58d4784b04813d21ac8c9bd99d36c9c4dd911c3fb5d683b59ccbc7af/grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b", size = 5605970 }, + { url = "https://files.pythonhosted.org/packages/7e/f0/01938fee8517de7c41fb8dbc84a8aafd309ae4c4e3ae1e652a66b4e76af9/grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d", size = 3131685 }, + { url = "https://files.pythonhosted.org/packages/9e/7f/adf4bc4c2d54e496eca16a856ddfdace57e7ace01ac9ffcd2abf888c47e6/grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df", size = 3702013 }, + { url = "https://files.pythonhosted.org/packages/28/98/1c5218ed23e4c5ba58058e52d39206871feba4e1d17bddfb4da48e441101/grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd", size = 100133810 }, + { url = "https://files.pythonhosted.org/packages/5c/45/8708497bc482cc7bf3779df9cf00c8e9efe1df5cd29b77e3eb060c141f84/grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14", size = 9650227 }, + { url = 
"https://files.pythonhosted.org/packages/56/0a/5320d3ba32ac3ba98a18606bedcec89b571c40d31f62302196ceac835e91/grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c", size = 5120380 }, + { url = "https://files.pythonhosted.org/packages/3e/7c/fd25f2e5247383d994b90a2d9522090bbc9e609547504613ea351928d2c7/grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134", size = 5625772 }, + { url = "https://files.pythonhosted.org/packages/de/01/a8d9bcc59526f22b8fef29c234cc63434f05dae1154d979222c02b31a557/grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253", size = 5354817 }, + { url = "https://files.pythonhosted.org/packages/13/4c/9d6ffdfcaa22f380dfd2b459b9761249ad61cfde65a927d832b3800d139b/grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444", size = 5908054 }, + { url = "https://files.pythonhosted.org/packages/7c/1e/f7b9c72ae6560d92027aac51f90a827051c3766ea961bc2d1b78c3657437/grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d", size = 5604825 }, + { url = "https://files.pythonhosted.org/packages/2d/2f/fd5ff4cf5a307dae7ba6b72962c904bcb26f08ea3df139019fdf5c40b298/grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320", size = 3127779 }, + { url = "https://files.pythonhosted.org/packages/6a/b9/f94bea4c6f0e322a239f7ba66ba3b0ce766d1c6a2d50055f7c8acf0fba38/grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b", size = 3699392 }, + { url = 
"https://files.pythonhosted.org/packages/61/f9/e3c4b4a879096fe608d75e2a5b4b3790baa91137c5d5da259f98128d2f86/grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18", size = 100617931 }, + { url = "https://files.pythonhosted.org/packages/dd/7d/5005318879231a879be0d33c588400941aee08ea8b5b45d3a9061d6bf0fb/grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748", size = 9612074 }, + { url = "https://files.pythonhosted.org/packages/f1/b5/93ea03649a8315fe00b11871bb7fa807e1ee22d14f5c4de2fbc288c6cd37/grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e", size = 5061795 }, + { url = "https://files.pythonhosted.org/packages/c9/b8/91b5b56f7812372bd51342126f0184a1a604723b0f58466ac20c2dcef63a/grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b", size = 5566289 }, + { url = "https://files.pythonhosted.org/packages/d7/2e/3337baee24c902d9e82f1eac00bc9dca106934763c4cd0faf819ef01b96b/grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55", size = 5300194 }, + { url = "https://files.pythonhosted.org/packages/8c/ea/b1229842677f5b712f72760d1633cf36813ec121c986454d6eba6de22093/grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca", size = 5852832 }, + { url = "https://files.pythonhosted.org/packages/05/dc/c641498f09246a61ebe7a721888edf772e2ecdfd524e25ac61e27352d9d3/grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5", size = 5555224 }, + { url = 
"https://files.pythonhosted.org/packages/4d/a3/0f07d9fdb9dddce85bbcc671bf49ed3c73301dfc3108ed4ab3212d55ef13/grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951", size = 3111209 }, + { url = "https://files.pythonhosted.org/packages/73/99/a7b768c6a9873b6f450476bfa389eeef877f152aeb443bec2bd91d9fb5a2/grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a", size = 3691893 }, +] + +[[package]] +name = "grpcio-health-checking" +version = "1.60.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/24/d58e2855bedfe4150718e03babcadb68d3dd69803cfdb45d27195bafcd20/grpcio-health-checking-1.60.0.tar.gz", hash = "sha256:478b5300778120fed9f6d134d72b157a59f9c06689789218cbff47fafca2f119", size = 16324 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/d7/98a877cabb6e0e1dd514f16d77b45036a8add1b0457a6e92c695baed9ded/grpcio_health_checking-1.60.0-py3-none-any.whl", hash = "sha256:13caf28bc93795bd6bdb580b21832ebdd1aa3f5b648ea47ed17362d85bed96d3", size = 18545 }, +] + +[[package]] +name = "grpcio-status" +version = "1.60.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2a/38/0cd65d29f8fe0b5efaef60a0664885b5457a566b1a531d3e6b76a8bb0f21/grpcio-status-1.60.0.tar.gz", hash = "sha256:f10e0b6db3adc0fdc244b71962814ee982996ef06186446b5695b9fa635aa1ab", size = 13546 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/bd/f46d6511088f314cfedc880721fd32d387b8513b22da01cf4771d7439a2b/grpcio_status-1.60.0-py3-none-any.whl", hash = "sha256:7d383fa36e59c1e61d380d91350badd4d12ac56e4de2c2b831b050362c3c572e", size = 14448 }, +] + +[[package]] +name = "grpcio-tools" +version = "1.60.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/8f/1861529938e4a27f8d9b736a4ba58846ab1ccf63b6d7610a86a0329ffc46/grpcio-tools-1.60.0.tar.gz", hash = "sha256:ed30499340228d733ff69fcf4a66590ed7921f94eb5a2bf692258b1280b9dac7", size = 4611505 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/fe/3552c6e900d86fa21ec7b18ce93e912fbf8d79ee5ea4b41a0cb5bbf75b1a/grpcio_tools-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:6807b7a3f3e6e594566100bd7fe04a2c42ce6d5792652677f1aaf5aa5adaef3d", size = 63932019 }, + { url = "https://files.pythonhosted.org/packages/c3/7f/44bb9eba5797e1cfebaa28bf9cb61f0b337d407953ccc377a66e0777501b/grpcio_tools-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:857c5351e9dc33a019700e171163f94fcc7e3ae0f6d2b026b10fda1e3c008ef1", size = 5120499 }, + { url = "https://files.pythonhosted.org/packages/1e/1f/670010f510a0f28f912e5080ebfa02bc8c809e6aaef8394ebfbe12593de9/grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:ec0e401e9a43d927d216d5169b03c61163fb52b665c5af2fed851357b15aef88", size = 2707837 }, + { url = "https://files.pythonhosted.org/packages/fd/c1/bb2198f3480d3acb7683708e729732b7f12ccbc4db0cb70b59a257928f88/grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e68dc4474f30cad11a965f0eb5d37720a032b4720afa0ec19dbcea2de73b5aae", size = 3060242 }, + { url = "https://files.pythonhosted.org/packages/3c/7d/00a156dba65c9965e6e94988ab518c4ea88f95e1b70c2b61b34dd65124b5/grpcio_tools-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbf0ed772d2ae7e8e5d7281fcc00123923ab130b94f7a843eee9af405918f924", size = 2795167 }, + { url = "https://files.pythonhosted.org/packages/6d/6a/a4980794503537474ca27d13ffedc200610a631c8cf047c0b311d19fb015/grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c771b19dce2bfe06899247168c077d7ab4e273f6655d8174834f9a6034415096", size = 3673788 }, + { url = "https://files.pythonhosted.org/packages/fc/3e/809c98c5423ac8374a55aa90e9d222a5da542aa13fd18b8181cfd01bb6cd/grpcio_tools-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5614cf0960456d21d8a0f4902e3e5e3bcacc4e400bf22f196e5dd8aabb978b7", size = 3281881 }, + { url = "https://files.pythonhosted.org/packages/f1/ff/282a802232e8de69221cd41c5045950d4253fe7d5d2e24574e5637c8184c/grpcio_tools-1.60.0-cp310-cp310-win32.whl", hash = "sha256:87cf439178f3eb45c1a889b2e4a17cbb4c450230d92c18d9c57e11271e239c55", size = 921927 }, + { url = "https://files.pythonhosted.org/packages/fe/b7/79ec64ad16b9159458ab29b485511a7dc7cf9c9f1cc9ba6e1bbc91f61646/grpcio_tools-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:687f576d7ff6ce483bc9a196d1ceac45144e8733b953620a026daed8e450bc38", size = 1068077 }, + { url = "https://files.pythonhosted.org/packages/7b/3c/233eb8db31c08f29ea84f690f6f25e2fd02477c1986ba13096e24b828878/grpcio_tools-1.60.0-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:2a8a758701f3ac07ed85f5a4284c6a9ddefcab7913a8e552497f919349e72438", size = 63937622 }, + { url = "https://files.pythonhosted.org/packages/7e/7f/47d8b35172b7f94b93c8ea4b7229f40a19d6da13bca976b6e85bbe7ef010/grpcio_tools-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:7c1cde49631732356cb916ee1710507967f19913565ed5f9991e6c9cb37e3887", size = 5147769 }, + { url = "https://files.pythonhosted.org/packages/c9/4d/b601d7bc72f453a1e9f9962be5a4ee81b5cae70b08bac5339e876cec355a/grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:d941749bd8dc3f8be58fe37183143412a27bec3df8482d5abd6b4ec3f1ac2924", size = 2708357 }, + { url = "https://files.pythonhosted.org/packages/90/ec/bc2902d5a753b59920082ba4e6b9b7adb8b3c076c327639494a32b51a953/grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ee35234f1da8fba7ddbc544856ff588243f1128ea778d7a1da3039be829a134", size = 3060323 }, + { url = "https://files.pythonhosted.org/packages/29/0f/fdfa88aff42abc0caa29f74cfa47e77ea1d6385c073c082fef582ac0ec9f/grpcio_tools-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8f7a5094adb49e85db13ea3df5d99a976c2bdfd83b0ba26af20ebb742ac6786", size = 2795700 }, + { url = "https://files.pythonhosted.org/packages/33/20/36584dff9564d1237f8fb90dc151d76dac8d00ac86dbd53bc99cc25767e1/grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:24c4ead4a03037beaeb8ef2c90d13d70101e35c9fae057337ed1a9144ef10b53", size = 3674639 }, + { url = "https://files.pythonhosted.org/packages/da/99/c08d1160f08089e7b422e6b97351cf17843a5b4bebc8ac5d98c8af8db7da/grpcio_tools-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811abb9c4fb6679e0058dfa123fb065d97b158b71959c0e048e7972bbb82ba0f", size = 3282610 }, + { url = "https://files.pythonhosted.org/packages/56/a7/378ccd3e8ec1e57fa62f9d60e7da6afece565b105f86d4393a8eabbccba4/grpcio_tools-1.60.0-cp311-cp311-win32.whl", hash = 
"sha256:bd2a17b0193fbe4793c215d63ce1e01ae00a8183d81d7c04e77e1dfafc4b2b8a", size = 922153 }, + { url = "https://files.pythonhosted.org/packages/61/19/528588f68effc32be1f5803f11d5dd66833e53a99384c0e1e4c53b78d42b/grpcio_tools-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:b22b1299b666eebd5752ba7719da536075eae3053abcf2898b65f763c314d9da", size = 1067992 }, + { url = "https://files.pythonhosted.org/packages/50/09/16b77ffe4f0e3f03c98407a82485e8c9c15bc433334965fbd31a9dfa127b/grpcio_tools-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:74025fdd6d1cb7ba4b5d087995339e9a09f0c16cf15dfe56368b23e41ffeaf7a", size = 63964335 }, + { url = "https://files.pythonhosted.org/packages/21/2f/3b4f50a810bc9892ac094b29c5c66e575a56813cb4e73fc9a4c7d2dccd3c/grpcio_tools-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:5a907a4f1ffba86501b2cdb8682346249ea032b922fc69a92f082ba045cca548", size = 5147864 }, + { url = "https://files.pythonhosted.org/packages/7c/28/f3baa87c8e53b7694761ea69d5d9c3f635b54ff7c09761e3593ca59344b3/grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:1fbb9554466d560472f07d906bfc8dcaf52f365c2a407015185993e30372a886", size = 2709526 }, + { url = "https://files.pythonhosted.org/packages/9d/07/87e5c0c70dfa0aefc130a6e9116a54866d72449706b35902fbbf3f57f37e/grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f10ef47460ce3c6fd400f05fe757b90df63486c9b84d1ecad42dcc5f80c8ac14", size = 3061068 }, + { url = "https://files.pythonhosted.org/packages/b4/cb/e8ad1dd2caac2de9e3a0e6627024ffca3bf30c9911e691f88b7dca4e5097/grpcio_tools-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:321b18f42a70813545e416ddcb8bf20defa407a8114906711c9710a69596ceda", size = 2797033 }, + { url = "https://files.pythonhosted.org/packages/ba/1d/8c8048c00c194aa8d5648aba853df4076be6d70e9a00a1f25d4830b6dee8/grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:081336d8258f1a56542aa8a7a5dec99a2b38d902e19fbdd744594783301b0210", size = 3674987 }, + { url = "https://files.pythonhosted.org/packages/a4/48/dae5740b16b9fdd937fa3bf4f29b6c95b8e0d2dc06a5e82a59e2aa67f07b/grpcio_tools-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:addc9b23d6ff729d9f83d4a2846292d4c84f5eb2ec38f08489a6a0d66ac2b91e", size = 3283144 }, + { url = "https://files.pythonhosted.org/packages/9b/b6/87d859bf481a2e5629c1ea14a741faa90d533b756af0c514cbff06b00c71/grpcio_tools-1.60.0-cp312-cp312-win32.whl", hash = "sha256:e87cabac7969bdde309575edc2456357667a1b28262b2c1f12580ef48315b19d", size = 922614 }, + { url = "https://files.pythonhosted.org/packages/a8/0a/d6fea138f949f307f2e6958fbf6a3cd94a2d6a51ba3a6333a36b02e24459/grpcio_tools-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70d867c120d9849093b0ac24d861e378bc88af2552e743d83b9f642d2caa7c2", size = 1068418 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "h2" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "hyperframe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/32/fec683ddd10629ea4ea46d206752a95a2d8a48c22521edd70b142488efe1/h2-4.1.0.tar.gz", 
hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb", size = 2145593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e5/db6d438da759efbb488c4f3fbdab7764492ff3c3f953132efa6b9f0e9e53/h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d", size = 57488 }, +] + +[[package]] +name = "hiredis" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/80/740fb0dfa7a42416ce8376490f41dcdb1e5deed9c3739dfe4200fad865a9/hiredis-3.0.0.tar.gz", hash = "sha256:fed8581ae26345dea1f1e0d1a96e05041a727a45e7d8d459164583e23c6ac441", size = 87581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/cc/41521d38c77f404c31e08a0118f369f37dc6a9e19cf315dbbc8b0b8afaba/hiredis-3.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:4b182791c41c5eb1d9ed736f0ff81694b06937ca14b0d4dadde5dadba7ff6dae", size = 81483 }, + { url = "https://files.pythonhosted.org/packages/99/35/0138fe68b0da01ea91ad67910577905b7f4a34b5c11e2f665d44067c52df/hiredis-3.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:13c275b483a052dd645eb2cb60d6380f1f5215e4c22d6207e17b86be6dd87ffa", size = 44763 }, + { url = "https://files.pythonhosted.org/packages/45/53/64fa74d43c17a406c2dc3cb4f1a3729ac00c5451f31f5940ca577b24afa9/hiredis-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1018cc7f12824506f165027eabb302735b49e63af73eb4d5450c66c88f47026", size = 42452 }, + { url = "https://files.pythonhosted.org/packages/af/b8/40c58b7db70e3850adeac85d5fca67e2fce6bf15c2705ca6af9c8bb32b5d/hiredis-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83a29cc7b21b746cb6a480189e49f49b2072812c445e66a9e38d2004d496b81c", size = 165712 }, + { url = 
"https://files.pythonhosted.org/packages/ff/8e/7afd36941d58cb0a7f0142ba3a043a5b3743dfff60596e98b355fb048113/hiredis-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e241fab6332e8fb5f14af00a4a9c6aefa22f19a336c069b7ddbf28ef8341e8d6", size = 176842 }, + { url = "https://files.pythonhosted.org/packages/ff/39/482970200e65cdcea037a595083e145fc089b8368312f6f2b0d3c5a7c266/hiredis-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fb8de899f0145d6c4d5d4bd0ee88a78eb980a7ffabd51e9889251b8f58f1785", size = 166127 }, + { url = "https://files.pythonhosted.org/packages/3a/2b/655e8b4b54ff28c88e2ac536d4aa24c9119c6160169c043351a91db69bca/hiredis-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b23291951959141173eec10f8573538e9349fa27f47a0c34323d1970bf891ee5", size = 165983 }, + { url = "https://files.pythonhosted.org/packages/81/d8/bc917412f95da9904a83a04263aa2760051c118d0199eac7250623bfcf17/hiredis-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e421ac9e4b5efc11705a0d5149e641d4defdc07077f748667f359e60dc904420", size = 162249 }, + { url = "https://files.pythonhosted.org/packages/77/93/d6585264bb50f9f79537429fa90f4a2a5c29fd5e70d57dec7705ff161a7c/hiredis-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:77c8006c12154c37691b24ff293c077300c22944018c3ff70094a33e10c1d795", size = 160013 }, + { url = "https://files.pythonhosted.org/packages/48/a5/302868a60e963c1b768bd5622f125f5b38a3ea084bdcb374c9251dcc7c02/hiredis-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:41afc0d3c18b59eb50970479a9c0e5544fb4b95e3a79cf2fbaece6ddefb926fe", size = 159315 }, + { url = "https://files.pythonhosted.org/packages/82/77/c02d516ab8f31d85378916055dbf980ef7ca431d93ba1f7ac11ac4304863/hiredis-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:04ccae6dcd9647eae6025425ab64edb4d79fde8b9e6e115ebfabc6830170e3b2", size = 171008 }, 
+ { url = "https://files.pythonhosted.org/packages/e1/28/c080805a340b418b1d022fa58465e365636c0ed201837e0fe70cc7beb0d3/hiredis-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fe91d62b0594db5ea7d23fc2192182b1a7b6973f628a9b8b2e0a42a2be721ac6", size = 163290 }, + { url = "https://files.pythonhosted.org/packages/6a/f9/caacca69987de597487360565e34dfd191ab23ce147144c13df1f2db6c8d/hiredis-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99516d99316062824a24d145d694f5b0d030c80da693ea6f8c4ecf71a251d8bb", size = 161037 }, + { url = "https://files.pythonhosted.org/packages/88/3a/0d560473ca21facc1de5ba538f655aeae71303afd71f2a5e35fadee0c698/hiredis-3.0.0-cp310-cp310-win32.whl", hash = "sha256:562eaf820de045eb487afaa37e6293fe7eceb5b25e158b5a1974b7e40bf04543", size = 20034 }, + { url = "https://files.pythonhosted.org/packages/9c/af/23c2ce80faffb0ceb1775fe4581829c229400d6faacc0e2567ae179e8bc2/hiredis-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a1c81c89ed765198da27412aa21478f30d54ef69bf5e4480089d9c3f77b8f882", size = 21863 }, + { url = "https://files.pythonhosted.org/packages/42/3e/502e2ce2487673214fbb4cc733b1a279bc71309a689803d9ba8ad6f2fa8f/hiredis-3.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:4664dedcd5933364756d7251a7ea86d60246ccf73a2e00912872dacbfcef8978", size = 81442 }, + { url = "https://files.pythonhosted.org/packages/18/0b/171d85b2ee0ac51f94e993a323beffdb6b273b838a4f86d9abaaca22e2f7/hiredis-3.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:47de0bbccf4c8a9f99d82d225f7672b9dd690d8fd872007b933ef51a302c9fa6", size = 44742 }, + { url = "https://files.pythonhosted.org/packages/6a/67/466e0b16caff07bc8df8f3ff8b0b279f81066e0fb6a201b0ec66288fe5a4/hiredis-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e43679eca508ba8240d016d8cca9d27342d70184773c15bea78a23c87a1922f1", size = 42424 }, + { url = 
"https://files.pythonhosted.org/packages/01/50/e1f21e1cc9426bdf62e9ca8106294fbc3e5d27ddbae2e85e47fb9f251d1b/hiredis-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13c345e7278c210317e77e1934b27b61394fee0dec2e8bd47e71570900f75823", size = 166331 }, + { url = "https://files.pythonhosted.org/packages/98/40/8d8e4e15045ce066570f82f49604c6273b186eda1e5c9b93b450dd25d7b9/hiredis-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00018f22f38530768b73ea86c11f47e8d4df65facd4e562bd78773bd1baef35e", size = 177350 }, + { url = "https://files.pythonhosted.org/packages/5d/9c/f7b6d7afa2bd9c6671de853069222d9d874725e387100dfb0f1a22aab122/hiredis-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ea3a86405baa8eb0d3639ced6926ad03e07113de54cb00fd7510cb0db76a89d", size = 166794 }, + { url = "https://files.pythonhosted.org/packages/53/0c/1076e0c045412081ec44dc81969373cda15c093a0692e10f2941e154e583/hiredis-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c073848d2b1d5561f3903879ccf4e1a70c9b1e7566c7bdcc98d082fa3e7f0a1d", size = 166566 }, + { url = "https://files.pythonhosted.org/packages/05/69/e081b023f86b0128fcf9f76c8ed5a5f9426895ad86de234b0332c18a57b8/hiredis-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a8dffb5f5b3415a4669d25de48b617fd9d44b0bccfc4c2ab24b06406ecc9ecb", size = 162561 }, + { url = "https://files.pythonhosted.org/packages/96/e0/7f957fb2158c6f6800b6faa2f90bedcc485ca038a2d42166761d400683a3/hiredis-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:22c17c96143c2a62dfd61b13803bc5de2ac526b8768d2141c018b965d0333b66", size = 160472 }, + { url = "https://files.pythonhosted.org/packages/5c/31/d68020aa6276bd1a7436ece96d540ad17c204d97285639e0757ef1c3d430/hiredis-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:c3ece960008dab66c6b8bb3a1350764677ee7c74ccd6270aaf1b1caf9ccebb46", size = 159705 }, + { url = "https://files.pythonhosted.org/packages/f7/68/5d101f8ffd764a96c2b959815adebb1e4b7e06db68122f9d3dbbc19b81eb/hiredis-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f75999ae00a920f7dce6ecae76fa5e8674a3110e5a75f12c7a2c75ae1af53396", size = 171498 }, + { url = "https://files.pythonhosted.org/packages/83/86/66131743a2012f668f84aa2eddc07e7b2462b4a07a753b27125f14e4b8bc/hiredis-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e069967cbd5e1900aafc4b5943888f6d34937fc59bf8918a1a546cb729b4b1e4", size = 163951 }, + { url = "https://files.pythonhosted.org/packages/a5/ea/58976d9c21086975a90c7fa2337591ea3903eeb55083e366b5ea36b99ca5/hiredis-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0aacc0a78e1d94d843a6d191f224a35893e6bdfeb77a4a89264155015c65f126", size = 161566 }, + { url = "https://files.pythonhosted.org/packages/39/69/cdb255e3d37f82f31f4b7b2db5bbd8500eae8d22c0d7992fe474fd02babd/hiredis-3.0.0-cp311-cp311-win32.whl", hash = "sha256:719c32147ba29528cb451f037bf837dcdda4ff3ddb6cdb12c4216b0973174718", size = 20037 }, + { url = "https://files.pythonhosted.org/packages/9d/cf/40d209e0458ac28a26973d1449df2922c7b8259f7f88d7738d11c87f9ff6/hiredis-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:bdc144d56333c52c853c31b4e2e52cfbdb22d3da4374c00f5f3d67c42158970f", size = 21862 }, + { url = "https://files.pythonhosted.org/packages/ae/09/0a3eace00115d8c82a8e7d8e58e60aacec10334f4f1512f09ffbac3252e3/hiredis-3.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:484025d2eb8f6348f7876fc5a2ee742f568915039fcb31b478fd5c242bb0fe3a", size = 81540 }, + { url = "https://files.pythonhosted.org/packages/1c/e8/1a7a5ded4fb11e91aafc5ba5518392f22883d54e79c4b47f188fb712ea46/hiredis-3.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fcdb552ffd97151dab8e7bc3ab556dfa1512556b48a367db94b5c20253a35ee1", size = 44814 }, + { url = 
"https://files.pythonhosted.org/packages/3b/f5/4e055dc9b55484644afb18063f28649cdbd19be4f15bc152bd633dccd6f7/hiredis-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bb6f9fd92f147ba11d338ef5c68af4fd2908739c09e51f186e1d90958c68cc1", size = 42478 }, + { url = "https://files.pythonhosted.org/packages/65/7b/e06f55b9dcdf10cb6b3f08d7917d3080096cd83deaef1bd4927720fbb280/hiredis-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa86bf9a0ed339ec9e8a9a9d0ae4dccd8671625c83f9f9f2640729b15e07fbfd", size = 168303 }, + { url = "https://files.pythonhosted.org/packages/f4/16/081e90137bb896acd9dc2e1e68480cc84d652af4d959e75e52d6ce9dd602/hiredis-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e194a0d5df9456995d8f510eab9f529213e7326af6b94770abf8f8b7952ddcaa", size = 179151 }, + { url = "https://files.pythonhosted.org/packages/1e/0f/f5aba1c82977f4b639e5b450c0d8685333f1200cd1972647eb3f4d972e55/hiredis-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a1df39d74ec507d79c7a82c8063eee60bf80537cdeee652f576059b9cdd15c", size = 168580 }, + { url = "https://files.pythonhosted.org/packages/60/86/aa24c20f6d3038bf244bc60a2fe8cde61fb3c0d6a82e2bed30b08d55f96c/hiredis-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f91456507427ba36fd81b2ca11053a8e112c775325acc74e993201ea912d63e9", size = 169147 }, + { url = "https://files.pythonhosted.org/packages/6e/03/a4c7a28b6320ef3e36062c1c51e9d66e889c9e09ee7d7ae38b8a2ffdb365/hiredis-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9862db92ef67a8a02e0d5370f07d380e14577ecb281b79720e0d7a89aedb9ee5", size = 164722 }, + { url = "https://files.pythonhosted.org/packages/cd/66/d60106b56ba0ddd9789656d204a577591ff0cd91ab94178bb96c84d0d918/hiredis-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d10fcd9e0eeab835f492832b2a6edb5940e2f1230155f33006a8dfd3bd2c94e4", size = 162561 }, + { url = "https://files.pythonhosted.org/packages/6a/30/f33f2b782096efe9fe6b24c67a4df13b5055d9c859f615a74fb4f18cce41/hiredis-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:48727d7d405d03977d01885f317328dc21d639096308de126c2c4e9950cbd3c9", size = 161388 }, + { url = "https://files.pythonhosted.org/packages/45/02/34d9b151f9ea4655bfe00e0230f7db8fd8a52c7b7bd728efdf1c17655860/hiredis-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e0bb6102ebe2efecf8a3292c6660a0e6fac98176af6de67f020bea1c2343717", size = 173561 }, + { url = "https://files.pythonhosted.org/packages/cf/54/68285d208918b6d83e32d872d8dcbf8d479ed2c74b863b836e48a2702a3f/hiredis-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:df274e3abb4df40f4c7274dd3e587dfbb25691826c948bc98d5fead019dfb001", size = 165914 }, + { url = "https://files.pythonhosted.org/packages/56/4f/5f36865f9f032caf00d603ff9cbde21506d2b1e0e0ce0b5d2ce2851411c9/hiredis-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:034925b5fb514f7b11aac38cd55b3fd7e9d3af23bd6497f3f20aa5b8ba58e232", size = 163968 }, + { url = "https://files.pythonhosted.org/packages/d3/ee/c38693bd1dbce34806ecc3536dc425e87e420030de7018194865511860c2/hiredis-3.0.0-cp312-cp312-win32.whl", hash = "sha256:120f2dda469b28d12ccff7c2230225162e174657b49cf4cd119db525414ae281", size = 20189 }, + { url = "https://files.pythonhosted.org/packages/4e/67/f50b45071bb8652fa9a28a84ee470a02042fb7a096a16f3c08842f2a5c2b/hiredis-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e584fe5f4e6681d8762982be055f1534e0170f6308a7a90f58d737bab12ff6a8", size = 21971 }, + { url = "https://files.pythonhosted.org/packages/6c/26/fee1a29d7d0cbb76e27ac0914bb17565b1d7cfa24d58922010a667190afc/hiredis-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50da7a9edf371441dfcc56288d790985ee9840d982750580710a9789b8f4a290", size = 39805 }, + { url = 
"https://files.pythonhosted.org/packages/c7/da/4e9fadc0615958b58e6632d6e85375062f80b60b268b21fa3f449aeee02e/hiredis-3.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b285ef6bf1581310b0d5e8f6ce64f790a1c40e89c660e1320b35f7515433672", size = 36883 }, + { url = "https://files.pythonhosted.org/packages/cf/d5/cc88b23e466ee070e0109a3e7d7e7835608ad90f80d8415bf7c8c726e71d/hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcfa684966f25b335072115de2f920228a3c2caf79d4bfa2b30f6e4f674a948", size = 47867 }, + { url = "https://files.pythonhosted.org/packages/09/5b/848006ee860cf543a8b964c17ef04a61ea16967c9b5f173557286ae1afd2/hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a41be8af1fd78ca97bc948d789a09b730d1e7587d07ca53af05758f31f4b985d", size = 48254 }, + { url = "https://files.pythonhosted.org/packages/91/41/ef57d7f6f324ea5052d707a510093ec61fde8c5f271029116490790168cf/hiredis-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:038756db735e417ab36ee6fd7725ce412385ed2bd0767e8179a4755ea11b804f", size = 55556 }, + { url = "https://files.pythonhosted.org/packages/81/52/150658b3006241f2de243e2ccb7f94cfeb74a855435e872dbde7d87f6842/hiredis-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fcecbd39bd42cef905c0b51c9689c39d0cc8b88b1671e7f40d4fb213423aef3a", size = 21938 }, +] + +[[package]] +name = "hpack" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/9b/fda93fb4d957db19b0f6b370e79d586b3e8528b20252c729c476a2c02954/hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095", size = 49117 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/34/e8b383f35b77c402d28563d2b8f83159319b509bc5f760b15d60b0abf165/hpack-4.0.0-py3-none-any.whl", hash = 
"sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c", size = 32611 }, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "h11", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/b0/5e8b8674f8d203335a62fdfcfa0d11ebe09e23613c3391033cbba35f7926/httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61", size = 83234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/d4/e5d7e4f2174f8a4d63c8897d79eb8fe2503f7ecc03282fee1fa2719c2704/httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5", size = 77926 }, +] + +[[package]] +name = "httplib2" +version = "0.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854 }, +] + +[[package]] +name = "httptools" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/d77686502fced061b3ead1c35a2d70f6b281b5f723c4eff7a2277c04e4a2/httptools-0.6.1.tar.gz", hash = 
"sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a", size = 191228 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/6a/80bce0216b63babf51cdc34814c3f0f10489e13ab89fb6bc91202736a8a2/httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f", size = 149778 }, + { url = "https://files.pythonhosted.org/packages/bd/7d/4cd75356dfe0ed0b40ca6873646bf9ff7b5138236c72338dc569dc57d509/httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563", size = 77604 }, + { url = "https://files.pythonhosted.org/packages/4e/74/6348ce41fb5c1484f35184c172efb8854a288e6090bb54e2210598268369/httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58", size = 346717 }, + { url = "https://files.pythonhosted.org/packages/65/e7/dd5ba95c84047118a363f0755ad78e639e0529be92424bb020496578aa3b/httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185", size = 341442 }, + { url = "https://files.pythonhosted.org/packages/d8/97/b37d596bc32be291477a8912bf9d1508d7e8553aa11a30cd871fd89cbae4/httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142", size = 354531 }, + { url = "https://files.pythonhosted.org/packages/99/c9/53ed7176583ec4b4364d941a08624288f2ae55b4ff58b392cdb68db1e1ed/httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658", size = 347754 }, + { url = "https://files.pythonhosted.org/packages/1e/fc/8a26c2adcd3f141e4729897633f03832b71ebea6f4c31cce67a92ded1961/httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b", size = 58165 }, + { url = "https://files.pythonhosted.org/packages/f5/d1/53283b96ed823d5e4d89ee9aa0f29df5a1bdf67f148e061549a595d534e4/httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1", size = 145855 }, + { url = "https://files.pythonhosted.org/packages/80/dd/cebc9d4b1d4b70e9f3d40d1db0829a28d57ca139d0b04197713816a11996/httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0", size = 75604 }, + { url = "https://files.pythonhosted.org/packages/76/7a/45c5a9a2e9d21f7381866eb7b6ead5a84d8fe7e54e35208eeb18320a29b4/httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc", size = 324784 }, + { url = "https://files.pythonhosted.org/packages/59/23/047a89e66045232fb82c50ae57699e40f70e073ae5ccd53f54e532fbd2a2/httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2", size = 318547 }, + { url = "https://files.pythonhosted.org/packages/82/f5/50708abc7965d7d93c0ee14a148ccc6d078a508f47fe9357c79d5360f252/httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837", size = 330211 }, + { url = "https://files.pythonhosted.org/packages/e3/1e/9823ca7aab323c0e0e9dd82ce835a6e93b69f69aedffbc94d31e327f4283/httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d", size = 322174 }, + { url = "https://files.pythonhosted.org/packages/14/e4/20d28dfe7f5b5603b6b04c33bb88662ad749de51f0c539a561f235f42666/httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3", size = 55434 }, + { url = "https://files.pythonhosted.org/packages/60/13/b62e086b650752adf9094b7e62dab97f4cb7701005664544494b7956a51e/httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0", size = 146354 }, + { url = "https://files.pythonhosted.org/packages/f8/5d/9ad32b79b6c24524087e78aa3f0a2dfcf58c11c90e090e4593b35def8a86/httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2", size = 75785 }, + { url = "https://files.pythonhosted.org/packages/d0/a4/b503851c40f20bcbd453db24ed35d961f62abdae0dccc8f672cd5d350d87/httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90", size = 345396 }, + { url = "https://files.pythonhosted.org/packages/a2/9a/aa406864f3108e06f7320425a528ff8267124dead1fd72a3e9da2067f893/httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503", size = 344741 }, + { url = "https://files.pythonhosted.org/packages/cf/3a/3fd8dfb987c4247651baf2ac6f28e8e9f889d484ca1a41a9ad0f04dfe300/httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84", size = 345096 }, + { url = "https://files.pythonhosted.org/packages/80/01/379f6466d8e2edb861c1f44ccac255ed1f8a0d4c5c666a1ceb34caad7555/httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb", size = 343535 }, + { url = "https://files.pythonhosted.org/packages/d3/97/60860e9ee87a7d4712b98f7e1411730520053b9d69e9e42b0b9751809c17/httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949", size = 55660 }, +] + +[[package]] +name = "httpx" +version = "0.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpcore", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/3da5bdf4408b8b2800061c339f240c1802f2e82d55e50bd39c5a881f47f0/httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5", size = 126413 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/7b/ddacf6dcebb42466abd03f368782142baa82e08fc0c1f8eaa05b4bae87d5/httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5", size = 75590 }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "huggingface-hub" +version = "0.24.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "fsspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/24/b98fce967b7d63700e5805b915012ba25bb538a81fcf11e97f3cc3f4f012/huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000", size = 349200 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/8f/d6718641c14d98a5848c6a24d2376028d292074ffade0702940a4b1dde76/huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970", size = 417509 }, +] + +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, +] + +[[package]] +name = "hyperframe" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/2a/4747bff0a17f7281abe73e955d60d80aae537a5d203f417fa1c2e7578ebb/hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914", 
size = 25008 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/de/85a784bcc4a3779d1753a7ec2dee5de90e18c7bcf402e71b51fcf150b129/hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15", size = 12389 }, +] + +[[package]] +name = "identify" +version = "2.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/32/f4/8e8f7db397a7ce20fbdeac5f25adaf567fc362472432938d25556008e03a/identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf", size = 99116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/6c/a4f39abe7f19600b74528d0c717b52fff0b300bb0161081510d39c53cb00/identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0", size = 98962 }, +] + +[[package]] +name = "idna" +version = "3.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/ed/f86a79a07470cb07819390452f178b3bef1d375f2ec021ecfc709fc7cf07/idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", size = 189575 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/3e/741d8c82801c347547f8a2a06aa57dbb1992be9e948df2ea0eda2c8b79e8/idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0", size = 66836 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/ff/bd28f70283b9cca0cbf0c2a6082acbecd822d1962ae7b2a904861b9965f8/importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812", size = 52667 
} +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/ef/38766b2edb096260d9b1b6ad35adaa0bce3b0567abb452b21eb074af88c4/importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f", size = 24769 }, +] + +[[package]] +name = "importlib-resources" +version = "6.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/b3/0412c28d21e31447e97728efcf8913afe1936692917629e6bdb847563484/importlib_resources-6.4.3.tar.gz", hash = "sha256:4a202b9b9d38563b46da59221d77bb73862ab5d79d461307bcb826d725448b98", size = 42026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/8b/e848c888201b211159cfceaac65cc3bc1e32ed9ab6ca30366c43e5f1969b/importlib_resources-6.4.3-py3-none-any.whl", hash = "sha256:2d6dfe3b9e055f72495c2085890837fc8c758984e209115c8792bddcb762cd93", size = 35265 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appnope", marker = "(platform_system == 'Darwin' and sys_platform == 'darwin') or (platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform == 'win32')" }, + { name = "comm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { 
name = "debugpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ipython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyter-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyter-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "matplotlib-inline", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "psutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyzmq", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tornado", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173 }, +] + +[[package]] +name = "ipython" +version = "8.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { 
name = "decorator", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "jedi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "matplotlib-inline", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pexpect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, + { name = "prompt-toolkit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "stack-data", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.12' and sys_platform == 'darwin') or (python_full_version < '3.12' and sys_platform == 'linux') or (python_full_version < '3.12' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/f4/dc45805e5c3e327a626139c023b296bafa4537e602a61055d377704ca54c/ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c", size = 5493422 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/48/4d2818054671bb272d1b12ca65748a4145dc602a463683b5c21b260becee/ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff", size = 817939 }, +] + +[[package]] +name = "isodate" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/7a/c0a56c7d56c7fa723988f122fa1f1ccf8c5c4ccc48efad0d214b49e5b1af/isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9", size = 28443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/85/7882d311924cbcfc70b1890780763e36ff0b140c7e51c110fc59a532f087/isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96", size = 41722 }, +] + +[[package]] +name = "jedi" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/99/99b493cec4bf43176b678de30f81ed003fd6a647a301b9c927280c600f0a/jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd", size = 1227821 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/9f/bc63f0f0737ad7a60800bfd472a4836661adae21f9c2535f3957b1e54ceb/jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0", size = 1569361 }, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/55/39036716d19cab0747a5020fc7e907f362fbf48c984b14e62127f7e68e5d/jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369", size = 240245 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, +] + +[[package]] +name = "jiter" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/1a/aa64be757afc614484b370a4d9fc1747dc9237b37ce464f7f9d9ca2a3d38/jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a", size = 158300 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/09/f659fc67d6aaa82c56432c4a7cc8365fff763acbf1c8f24121076617f207/jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f", size = 284126 }, + { url = "https://files.pythonhosted.org/packages/07/2d/5bdaddfefc44f91af0f3340e75ef327950d790c9f86490757ac8b395c074/jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5", size = 299265 }, + { url = "https://files.pythonhosted.org/packages/74/bd/964485231deaec8caa6599f3f27c8787a54e9f9373ae80dcfbda2ad79c02/jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28", size = 332178 }, + { url = "https://files.pythonhosted.org/packages/cf/4f/6353179174db10254549bbf2eb2c7ea102e59e0460ee374adb12071c274d/jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e", size = 342533 }, + { url = "https://files.pythonhosted.org/packages/76/6f/21576071b8b056ef743129b9dacf9da65e328b58766f3d1ea265e966f000/jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a", size = 
363469 }, + { url = "https://files.pythonhosted.org/packages/73/a1/9ef99a279c72a031dbe8a4085db41e3521ae01ab0058651d6ccc809a5e93/jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749", size = 379078 }, + { url = "https://files.pythonhosted.org/packages/41/6a/c038077509d67fe876c724bfe9ad15334593851a7def0d84518172bdd44a/jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc", size = 318943 }, + { url = "https://files.pythonhosted.org/packages/67/0d/d82673814eb38c208b7881581df596e680f8c2c003e2b80c25ca58975ee4/jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d", size = 357394 }, + { url = "https://files.pythonhosted.org/packages/56/9e/cbd8f6612346c38cc42e41e35cda19ce78f5b12e4106d1186e8e95ee839b/jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87", size = 511080 }, + { url = "https://files.pythonhosted.org/packages/ff/33/135c0c33565b6d5c3010d047710837427dd24c9adbc9ca090f3f92df446e/jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e", size = 492827 }, + { url = "https://files.pythonhosted.org/packages/68/c1/491a8ef682508edbaf2a32e41c1b1e34064078b369b0c2d141170999d1c9/jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf", size = 195081 }, + { url = "https://files.pythonhosted.org/packages/31/20/8cda4faa9571affea6130b150289522a22329778bdfa45a7aab4e7edff95/jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e", size = 190977 }, + { url = 
"https://files.pythonhosted.org/packages/94/5f/3ac960ed598726aae46edea916e6df4df7ff6fe084bc60774b95cf3154e6/jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553", size = 284131 }, + { url = "https://files.pythonhosted.org/packages/03/eb/2308fa5f5c14c97c4c7720fef9465f1fa0771826cddb4eec9866bdd88846/jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3", size = 299310 }, + { url = "https://files.pythonhosted.org/packages/3c/f6/dba34ca10b44715fa5302b8e8d2113f72eb00a9297ddf3fa0ae4fd22d1d1/jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6", size = 332282 }, + { url = "https://files.pythonhosted.org/packages/69/f7/64e0a7439790ec47f7681adb3871c9d9c45fff771102490bbee5e92c00b7/jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4", size = 342370 }, + { url = "https://files.pythonhosted.org/packages/55/31/1efbfff2ae8e4d919144c53db19b828049ad0622a670be3bbea94a86282c/jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9", size = 363591 }, + { url = "https://files.pythonhosted.org/packages/30/c3/7ab2ca2276426a7398c6dfb651e38dbc81954c79a3bfbc36c514d8599499/jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614", size = 378551 }, + { url = "https://files.pythonhosted.org/packages/47/e7/5d88031cd743c62199b125181a591b1671df3ff2f6e102df85c58d8f7d31/jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e", size = 319152 }, + { url = 
"https://files.pythonhosted.org/packages/4c/2d/09ea58e1adca9f0359f3d41ef44a1a18e59518d7c43a21f4ece9e72e28c0/jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06", size = 357377 }, + { url = "https://files.pythonhosted.org/packages/7d/2f/83ff1058cb56fc3ff73e0d3c6440703ddc9cdb7f759b00cfbde8228fc435/jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403", size = 511091 }, + { url = "https://files.pythonhosted.org/packages/ae/c9/4f85f97c9894382ab457382337aea0012711baaa17f2ed55c0ff25f3668a/jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646", size = 492948 }, + { url = "https://files.pythonhosted.org/packages/4d/f2/2e987e0eb465e064c5f52c2f29c8d955452e3b316746e326269263bfb1b7/jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb", size = 195183 }, + { url = "https://files.pythonhosted.org/packages/ab/59/05d1c3203c349b37c4dd28b02b9b4e5915a7bcbd9319173b4548a67d2e93/jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae", size = 191032 }, + { url = "https://files.pythonhosted.org/packages/aa/bd/c3950e2c478161e131bed8cb67c36aed418190e2a961a1c981e69954e54b/jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a", size = 283511 }, + { url = "https://files.pythonhosted.org/packages/80/1c/8ce58d8c37a589eeaaa5d07d131fd31043886f5e77ab50c00a66d869a361/jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df", size = 296974 }, + { url = 
"https://files.pythonhosted.org/packages/4d/b8/6faeff9eed8952bed93a77ea1cffae7b946795b88eafd1a60e87a67b09e0/jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248", size = 331897 }, + { url = "https://files.pythonhosted.org/packages/4f/54/1d9a2209b46d39ce6f0cef3ad87c462f9c50312ab84585e6bd5541292b35/jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544", size = 342962 }, + { url = "https://files.pythonhosted.org/packages/2a/de/90360be7fc54b2b4c2dfe79eb4ed1f659fce9c96682e6a0be4bbe71371f7/jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba", size = 363844 }, + { url = "https://files.pythonhosted.org/packages/ba/ad/ef32b173191b7a53ea8a6757b80723cba321f8469834825e8c71c96bde17/jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f", size = 378709 }, + { url = "https://files.pythonhosted.org/packages/07/de/353ce53743c0defbbbd652e89c106a97dbbac4eb42c95920b74b5056b93a/jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e", size = 319038 }, + { url = "https://files.pythonhosted.org/packages/3f/92/42d47310bf9530b9dece9e2d7c6d51cf419af5586ededaf5e66622d160e2/jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a", size = 357763 }, + { url = "https://files.pythonhosted.org/packages/bd/8c/2bb76a9a84474d48fdd133d3445db8a4413da4e87c23879d917e000a9d87/jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e", size = 511031 }, + { 
url = "https://files.pythonhosted.org/packages/33/4f/9f23d79c0795e0a8e56e7988e8785c2dcda27e0ed37977256d50c77c6a19/jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338", size = 493042 }, + { url = "https://files.pythonhosted.org/packages/df/67/8a4f975aa834b8aecdb6b131422390173928fd47f42f269dcc32034ab432/jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4", size = 195405 }, + { url = "https://files.pythonhosted.org/packages/15/81/296b1e25c43db67848728cdab34ac3eb5c5cbb4955ceb3f51ae60d4a5e3d/jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5", size = 189720 }, +] + +[[package]] +name = "joblib" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/64/33/60135848598c076ce4b231e1b1895170f45fbcaeaa2c9d5e38b04db70c35/joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e", size = 2116621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/29/df4b9b42f2be0b623cbd5e2140cafcaa2bef0759a00b7b70104dcfe2fb51/joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6", size = 301817 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jsonschema-specifications", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "referencing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rpds-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-path" +version = "0.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pathable", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "referencing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/39/3a58b63a997b0cf824536d6f84fff82645a1ca8de222ee63586adab44dfa/jsonschema_path-0.3.3.tar.gz", hash = "sha256:f02e5481a4288ec062f8e68c808569e427d905bedfecb7f2e4c69ef77957c382", size = 11589 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/b0/69237e85976916b2e37586b7ddc48b9547fc38b440e25103d084b2b02ab3/jsonschema_path-0.3.3-py3-none-any.whl", hash = "sha256:203aff257f8038cd3c67be614fe6b2001043408cb1b4e36576bc4921e09d83c4", size = 14817 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f8/b9/cc0cc592e7c195fb8a650c1d5990b10175cf13b4c97465c72ec841de9e4b/jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc", size = 13983 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/07/44bd408781594c4d0a027666ef27fab1e441b109dc3b76b4f836f8fd04fe/jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c", size = 18482 }, +] + +[[package]] +name = "jupyter-client" +version = "8.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyzmq", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tornado", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/61/3cd51dea7878691919adc34ff6ad180f13bfe25fb8c7662a9ee6dc64e643/jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df", size = 341102 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/d3/c4bb02580bc0db807edb9a29b2d0c56031be1ef0d804336deb2699a470f6/jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f", size = 105901 }, +] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/11/b56381fa6c3f4cc5d2cf54a7dbf98ad9aa0b339ef7a601d6053538b079a7/jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9", size = 87629 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 }, +] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884 }, +] + +[[package]] +name = "kubernetes" +version = "30.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-auth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests-oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "websocket-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/3c/9f29f6cab7f35df8e54f019e5719465fa97b877be2454e99f989270b4f34/kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc", size = 887810 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/2027ddede72d33be2effc087580aeba07e733a7360780ae87226f1f91bd8/kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d", size = 1706042 }, +] + +[[package]] +name = "lazy-object-proxy" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/f0/f02e2d150d581a294efded4020094a371bbab42423fe78625ac18854d89b/lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69", size = 43271 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/42/a96d9d153f6ea38b925494cb9b42cf4a9f98fd30cad3124fc22e9d04ec34/lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977", size = 27432 }, + { url = 
"https://files.pythonhosted.org/packages/4a/0d/b325461e43dde8d7644e9b9e9dd57f2a4af472b588c51ccbc92778e60ea4/lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3", size = 69133 }, + { url = "https://files.pythonhosted.org/packages/8b/fc/83711d743fb5aaca5747bbf225fe3b5cbe085c7f6c115856b5cce80f3224/lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05", size = 68272 }, + { url = "https://files.pythonhosted.org/packages/8d/b5/ea47215abd4da45791664d7bbfe2976ca0de2c37af38b5e9e6cf89e0e65e/lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895", size = 70891 }, + { url = "https://files.pythonhosted.org/packages/8b/9b/908e12e5fa265ea1579261ff80f7b2136fd2ba254bc7f4f7e3dba83fd0f2/lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83", size = 70451 }, + { url = "https://files.pythonhosted.org/packages/16/ab/d9a47f2e70767af5ee311d71109be6ef2991c66c77bfa18e66707edd9f8c/lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9", size = 25778 }, + { url = "https://files.pythonhosted.org/packages/74/d6/0104e4154d2c30227eb54491dda8a4132be046b4cb37fb4ce915a5abc0d5/lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4", size = 27551 }, + { url = "https://files.pythonhosted.org/packages/ff/e1/99a7ec68b892c9b8c6212617f54e7e9b0304d47edad8c0ff043ae3aeb1a9/lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c", size = 
27434 }, + { url = "https://files.pythonhosted.org/packages/1a/76/6a41de4b44d1dcfe4c720d4606de0d7b69b6b450f0bdce16f2e1fb8abc89/lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4", size = 70687 }, + { url = "https://files.pythonhosted.org/packages/1e/5d/eaa12126e8989c9bdd21d864cbba2b258cb9ee2f574ada1462a0004cfad8/lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56", size = 69757 }, + { url = "https://files.pythonhosted.org/packages/53/a9/6f22cfe9572929656988b72c0de266c5d10755369b575322725f67364c4e/lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9", size = 73709 }, + { url = "https://files.pythonhosted.org/packages/bd/e6/b10fd94710a99a6309f3ad61a4eb480944bbb17fcb41bd2d852fdbee57ee/lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f", size = 73191 }, + { url = "https://files.pythonhosted.org/packages/c9/78/a9b9d314da02fe66b632f2354e20e40fc3508befb450b5a17987a222b383/lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03", size = 25773 }, + { url = "https://files.pythonhosted.org/packages/94/e6/e2d3b0c9efe61f72dc327ce2355941f540e0b0d1f2b3490cbab6bab7d3ea/lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6", size = 27550 }, + { url = "https://files.pythonhosted.org/packages/d0/5d/768a7f2ccebb29604def61842fd54f6f5f75c79e366ee8748dda84de0b13/lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba", size = 27560 }, + { url = "https://files.pythonhosted.org/packages/b3/ce/f369815549dbfa4bebed541fa4e1561d69e4f268a1f6f77da886df182dab/lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43", size = 72403 }, + { url = "https://files.pythonhosted.org/packages/44/46/3771e0a4315044aa7b67da892b2fb1f59dfcf0eaff2c8967b2a0a85d5896/lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9", size = 72401 }, + { url = "https://files.pythonhosted.org/packages/81/39/84ce4740718e1c700bd04d3457ac92b2e9ce76529911583e7a2bf4d96eb2/lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3", size = 75375 }, + { url = "https://files.pythonhosted.org/packages/86/3b/d6b65da2b864822324745c0a73fe7fd86c67ccea54173682c3081d7adea8/lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b", size = 75466 }, + { url = "https://files.pythonhosted.org/packages/f5/33/467a093bf004a70022cb410c590d937134bba2faa17bf9dc42a48f49af35/lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074", size = 25914 }, + { url = "https://files.pythonhosted.org/packages/77/ce/7956dc5ac2f8b62291b798c8363c81810e22a9effe469629d297d087e350/lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282", size = 27525 }, + { url = 
"https://files.pythonhosted.org/packages/31/8b/94dc8d58704ab87b39faed6f2fc0090b9d90e2e2aa2bbec35c79f3d2a054/lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d", size = 16405 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "2.1.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/5b/aae44c6655f3801e81aa3eef09dbbf012431987ba564d7231722f68df02d/MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b", size = 19384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/54/ad5eb37bf9d51800010a74e4665425831a9db4e7c4e0fde4352e391e808e/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc", size = 18206 }, + { url = "https://files.pythonhosted.org/packages/6a/4a/a4d49415e600bacae038c67f9fecc1d5433b9d3c71a4de6f33537b89654c/MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5", size = 14079 }, + { url = 
"https://files.pythonhosted.org/packages/0a/7b/85681ae3c33c385b10ac0f8dd025c30af83c78cec1c37a6aa3b55e67f5ec/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46", size = 26620 }, + { url = "https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f", size = 25818 }, + { url = "https://files.pythonhosted.org/packages/29/fe/a36ba8c7ca55621620b2d7c585313efd10729e63ef81e4e61f52330da781/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900", size = 25493 }, + { url = "https://files.pythonhosted.org/packages/60/ae/9c60231cdfda003434e8bd27282b1f4e197ad5a710c14bee8bea8a9ca4f0/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff", size = 30630 }, + { url = "https://files.pythonhosted.org/packages/65/dc/1510be4d179869f5dafe071aecb3f1f41b45d37c02329dfba01ff59e5ac5/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad", size = 29745 }, + { url = "https://files.pythonhosted.org/packages/30/39/8d845dd7d0b0613d86e0ef89549bfb5f61ed781f59af45fc96496e897f3a/MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd", size = 30021 }, + { url = "https://files.pythonhosted.org/packages/c7/5c/356a6f62e4f3c5fbf2602b4771376af22a3b16efa74eb8716fb4e328e01e/MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4", size = 16659 }, + { url = 
"https://files.pythonhosted.org/packages/69/48/acbf292615c65f0604a0c6fc402ce6d8c991276e16c80c46a8f758fbd30c/MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5", size = 17213 }, + { url = "https://files.pythonhosted.org/packages/11/e7/291e55127bb2ae67c64d66cef01432b5933859dfb7d6949daa721b89d0b3/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f", size = 18219 }, + { url = "https://files.pythonhosted.org/packages/6b/cb/aed7a284c00dfa7c0682d14df85ad4955a350a21d2e3b06d8240497359bf/MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2", size = 14098 }, + { url = "https://files.pythonhosted.org/packages/1c/cf/35fe557e53709e93feb65575c93927942087e9b97213eabc3fe9d5b25a55/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced", size = 29014 }, + { url = "https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5", size = 28220 }, + { url = "https://files.pythonhosted.org/packages/0c/40/2e73e7d532d030b1e41180807a80d564eda53babaf04d65e15c1cf897e40/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c", size = 27756 }, + { url = "https://files.pythonhosted.org/packages/18/46/5dca760547e8c59c5311b332f70605d24c99d1303dd9a6e1fc3ed0d73561/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f", size = 33988 }, + { url = 
"https://files.pythonhosted.org/packages/6d/c5/27febe918ac36397919cd4a67d5579cbbfa8da027fa1238af6285bb368ea/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a", size = 32718 }, + { url = "https://files.pythonhosted.org/packages/f8/81/56e567126a2c2bc2684d6391332e357589a96a76cb9f8e5052d85cb0ead8/MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f", size = 33317 }, + { url = "https://files.pythonhosted.org/packages/00/0b/23f4b2470accb53285c613a3ab9ec19dc944eaf53592cb6d9e2af8aa24cc/MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906", size = 16670 }, + { url = "https://files.pythonhosted.org/packages/b7/a2/c78a06a9ec6d04b3445a949615c4c7ed86a0b2eb68e44e7541b9d57067cc/MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617", size = 17224 }, + { url = "https://files.pythonhosted.org/packages/53/bd/583bf3e4c8d6a321938c13f49d44024dbe5ed63e0a7ba127e454a66da974/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1", size = 18215 }, + { url = "https://files.pythonhosted.org/packages/48/d6/e7cd795fc710292c3af3a06d80868ce4b02bfbbf370b7cee11d282815a2a/MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4", size = 14069 }, + { url = "https://files.pythonhosted.org/packages/51/b5/5d8ec796e2a08fc814a2c7d2584b55f889a55cf17dd1a90f2beb70744e5c/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee", size = 29452 }, + { url = 
"https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5", size = 28462 }, + { url = "https://files.pythonhosted.org/packages/2d/75/fd6cb2e68780f72d47e6671840ca517bda5ef663d30ada7616b0462ad1e3/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b", size = 27869 }, + { url = "https://files.pythonhosted.org/packages/b0/81/147c477391c2750e8fc7705829f7351cf1cd3be64406edcf900dc633feb2/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a", size = 33906 }, + { url = "https://files.pythonhosted.org/packages/8b/ff/9a52b71839d7a256b563e85d11050e307121000dcebc97df120176b3ad93/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f", size = 32296 }, + { url = "https://files.pythonhosted.org/packages/88/07/2dc76aa51b481eb96a4c3198894f38b480490e834479611a4053fbf08623/MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169", size = 33038 }, + { url = "https://files.pythonhosted.org/packages/96/0c/620c1fb3661858c0e37eb3cbffd8c6f732a67cd97296f725789679801b31/MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad", size = 16572 }, + { url = "https://files.pythonhosted.org/packages/3f/14/c3554d512d5f9100a95e737502f4a2323a1959f6d0d01e0d0997b35f7b10/MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb", size = 17127 }, +] + +[[package]] +name = "marshmallow" +version = 
"3.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/40/faa10dc4500bca85f41ca9d8cefab282dd23d0fcc7a9b5fab40691e72e76/marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e", size = 176836 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/78/c1de55eb3311f2c200a8b91724414b8d6f5ae78891c15d9d936ea43c3dba/marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9", size = 49334 }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", 
hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "milvus" +version = "2.3.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/b7/c82bef4474045a82d204eaf48f100e28b281920377c399abcf327b9ba6ac/milvus-2.3.5-py3-none-macosx_12_0_arm64.whl", hash = "sha256:328d2ba24fb04a595f47ab226abf5565691bfe242beb88e61b31326d0416bf1a", size = 37754340 }, + { url = "https://files.pythonhosted.org/packages/fa/a2/67dccec2690afac9c738c70bd2f4b5b58c9845bc1b2b0764a7f8470de602/milvus-2.3.5-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:e35a8d6397da1f0f685d0f55afad8654296ff3b3aea296439e53ce9980d1ad22", size = 41879314 }, + { url = "https://files.pythonhosted.org/packages/bd/ed/e216ec677abac11b49bbcc35c3eadf48e6db832e8e4f368f8eed34f23cec/milvus-2.3.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:69515a0630ce29fd10e101fa442afea8ca1387b93a456cd9bd41fdf3deb93d04", size = 57692521 }, +] + +[[package]] +name = "minio" +version = "7.2.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argon2-cffi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pycryptodome", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/be/6ddefcacca569bc1199cf8796fef891e67596ae30d865ea27e86b247ca4f/minio-7.2.8.tar.gz", hash = "sha256:f8af2dafc22ebe1aef3ac181b8e217037011c430aa6da276ed627e55aaf7c815", size = 135078 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/34/89/f4d5cfb0a5494e7dae1c11d6d1ab82811d93f6af8ca54e1393c046ff0e75/minio-7.2.8-py3-none-any.whl", hash = "sha256:aa3b485788b63b12406a5798465d12a57e4be2ac2a58a8380959b6b748e64ddd", size = 93488 }, +] + +[[package]] +name = "mistralai" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "orjson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/20/4204f461588310b3a7ffbbbb7fa573493dc1c8185d376ee72516c04575bf/mistralai-0.4.2.tar.gz", hash = "sha256:5eb656710517168ae053f9847b0bb7f617eda07f1f93f946ad6c91a4d407fd93", size = 14234 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/fe/79dad76b8d94b62d9e2aab8446183190e1dc384c617d06c3c93307850e11/mistralai-0.4.2-py3-none-any.whl", hash = "sha256:63c98eea139585f0a3b2c4c6c09c453738bac3958055e6f2362d3866e96b0168", size = 20334 }, +] + +[[package]] +name = "mistune" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/c8/f0173fe3bf85fd891aee2e7bcd8207dfe26c2c683d727c5a6cc3aec7b628/mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8", size = 90840 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/74/c95adcdf032956d9ef6c89a9b8a5152bf73915f8c633f3e3d88d06bd699c/mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205", size = 47958 }, +] + +[[package]] +name = "mmh3" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/63/96/aa247e82878b123468f0079ce2ac77e948315bab91ce45d2934a62e0af95/mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a", size = 26357 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/5a/8609dc74421858f7e94a89dc69221ab9b2c14d0d63a139b46ec190eedc44/mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a", size = 39433 }, + { url = "https://files.pythonhosted.org/packages/93/6c/e7a0f07c7082c76964b1ff46aa852f36e2ec6a9c3530dec0afa0b3162fc2/mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c", size = 29280 }, + { url = "https://files.pythonhosted.org/packages/76/84/60ca728ec7d7e1779a98000d64941c6221786124b4f07bf105a627055890/mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b", size = 30130 }, + { url = "https://files.pythonhosted.org/packages/2a/22/f2ec190b491f712d9ef5ea6252204b6f05255ac9af54a7b505adc3128aed/mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437", size = 68837 }, + { url = "https://files.pythonhosted.org/packages/ae/b9/c1e8065671e1d2f4e280c9c57389e74964f4a5792cac26717ad592002c7d/mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39", size = 72275 }, + { url = "https://files.pythonhosted.org/packages/6b/18/92bbdb102ab2b4e80084e927187d871758280eb067c649693e42bfc6d0d1/mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6", size = 70919 }, + { url = 
"https://files.pythonhosted.org/packages/e2/cd/391ce1d1bb559871a5d3a6bbb30b82bf51d3e3b42c4e8589cccb201953da/mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b", size = 65885 }, + { url = "https://files.pythonhosted.org/packages/03/87/4b01a43336bd506478850d1bc3d180648b2d26b4acf1fc4bf1df72bf562f/mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05", size = 67610 }, + { url = "https://files.pythonhosted.org/packages/e8/12/b464149a1b7181c7ce431ebf3d24fa994863f2f1abc75b78d202dde966e0/mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a", size = 74888 }, + { url = "https://files.pythonhosted.org/packages/fc/3e/f4eb45a23fc17b970394c1fe74eba157514577ae2d63757684241651d754/mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6", size = 72969 }, + { url = "https://files.pythonhosted.org/packages/c0/3b/83934fd9494371357da0ca026d55ad427c199d611b97b6ffeecacfd8e720/mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b", size = 80338 }, + { url = "https://files.pythonhosted.org/packages/b6/c4/5bcd709ea7269173d7e925402f05e05cf12194ef53cc9912a5ad166f8ded/mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970", size = 76580 }, + { url = "https://files.pythonhosted.org/packages/da/6a/4c0680d64475e551d7f4cc78bf0fd247c711ed2717f6bb311934993d1e69/mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da", size = 75325 }, + { url = 
"https://files.pythonhosted.org/packages/70/bc/e2ed99e580b3dd121f6462147bd5f521c57b3c81c692aa2d416b0678c89f/mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47", size = 31235 }, + { url = "https://files.pythonhosted.org/packages/73/2b/3aec865da7feb52830782d9fb7c54115cc18815680c244301adf9080622f/mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865", size = 31271 }, + { url = "https://files.pythonhosted.org/packages/17/2a/925439189ccf562bdcb839aed6263d718359f0c376d673beb3b83d3864ac/mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00", size = 30147 }, + { url = "https://files.pythonhosted.org/packages/2e/d6/86beea107e7e9700df9522466346c23a2f54faa81337c86fd17002aa95a6/mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6", size = 39427 }, + { url = "https://files.pythonhosted.org/packages/1c/08/65fa5489044e2afc304e8540c6c607d5d7b136ddc5cd8315c13de0adc34c/mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896", size = 29281 }, + { url = "https://files.pythonhosted.org/packages/b3/aa/98511d3ea3f6ba958136d913be3be3c1009be935a20ecc7b2763f0a605b6/mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0", size = 30130 }, + { url = "https://files.pythonhosted.org/packages/3c/b7/1a93f81643435b0e57f1046c4ffe46f0214693eaede0d9b0a1a236776e70/mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14", size = 69072 }, + { url = 
"https://files.pythonhosted.org/packages/45/9e/2ff70246aefd9cf146bc6a420c28ed475a0d1a325f31ee203be02f9215d4/mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e", size = 72470 }, + { url = "https://files.pythonhosted.org/packages/dc/cb/57bc1fdbdbe6837aebfca982494e23e2498ee2a89585c9054713b22e4167/mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13", size = 71251 }, + { url = "https://files.pythonhosted.org/packages/4d/c2/46d7d2721b69fbdfd30231309e6395f62ff6744e5c00dd8113b9faa06fba/mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560", size = 66035 }, + { url = "https://files.pythonhosted.org/packages/6f/a4/7ba4bcc838818bcf018e26d118d5ddb605c23c4fad040dc4d811f1cfcb04/mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f", size = 67844 }, + { url = "https://files.pythonhosted.org/packages/71/ed/8e80d1038e7bb15eaf739711d1fc36f2341acb6b1b95fa77003f2799c91e/mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106", size = 76724 }, + { url = "https://files.pythonhosted.org/packages/1c/22/a6a70ca81f0ce8fe2f3a68d89c1184c2d2d0fbe0ee305da50e972c5ff9fa/mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db", size = 75004 }, + { url = "https://files.pythonhosted.org/packages/73/20/abe50b605760f1f5b6e0b436c650649e69ca478d0f41b154f300367c09e4/mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea", size 
= 82230 }, + { url = "https://files.pythonhosted.org/packages/45/80/a1fc99d3ee50b573df0bfbb1ad518463af78d2ebca44bfca3b3f9473d651/mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9", size = 78679 }, + { url = "https://files.pythonhosted.org/packages/9e/51/6c9ee2ddf3b386f45ff83b6926a5e826635757d91dab04cbf16eee05f9a7/mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb", size = 77382 }, + { url = "https://files.pythonhosted.org/packages/ee/fa/4b377f244c27fac5f0343cc4dc0d2eb0a08049afc8d5322d07be7461a768/mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f", size = 31232 }, + { url = "https://files.pythonhosted.org/packages/d1/b0/500ef56c29b276d796bfdb47c16d34fa18a68945e4d730a6fa7d483583ed/mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec", size = 31276 }, + { url = "https://files.pythonhosted.org/packages/cc/84/94795e6e710c3861f8f355a12be9c9f4b8433a538c983e75bd4c00496a8a/mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6", size = 30142 }, + { url = "https://files.pythonhosted.org/packages/18/45/b4d41e86b00eed8c500adbe0007129861710e181c7f49c507ef6beae9496/mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c", size = 39495 }, + { url = "https://files.pythonhosted.org/packages/a6/d4/f041b8704cb8d1aad3717105daa582e29818b78a540622dfed84cd00d88f/mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5", size = 29334 }, + { url = "https://files.pythonhosted.org/packages/cb/bb/8f75378e1a83b323f9ed06248333c383e7dac614c2f95e1419965cb91693/mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2", size = 30144 }, + { url = "https://files.pythonhosted.org/packages/3e/50/5e36c1945bd83e780a37361fc1999fc4c5a59ecc10a373557fdf0e58eb1f/mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d", size = 69094 }, + { url = "https://files.pythonhosted.org/packages/70/c7/6ae37e7519a938226469476b84bcea2650e2a2cc7a848e6a206ea98ecee3/mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0", size = 72611 }, + { url = "https://files.pythonhosted.org/packages/5e/47/6613f69f57f1e5045e66b22fae9c2fb39ef754c455805d3917f6073e316e/mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2", size = 71462 }, + { url = "https://files.pythonhosted.org/packages/e0/0a/e423db18ce7b479c4b96381a112b443f0985c611de420f95c58a9f934080/mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5", size = 66165 }, + { url = "https://files.pythonhosted.org/packages/4c/7b/bfeb68bee5bddc8baf7ef630b93edc0a533202d84eb076dbb6c77e7e5fd5/mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3", size = 68088 }, + { url = "https://files.pythonhosted.org/packages/d4/a6/b82e30143997c05776887f5177f724e3b714aa7e7346fbe2ec70f52abcd0/mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828", size = 76241 }, + { url = 
"https://files.pythonhosted.org/packages/6c/60/a3d5872cf7610fcb13e36c472476020c5cf217b23c092bad452eb7784407/mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5", size = 74538 }, + { url = "https://files.pythonhosted.org/packages/f6/d5/742173a94c78f4edab71c04097f6f9150c47f8fd034d592f5f34a9444719/mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe", size = 81793 }, + { url = "https://files.pythonhosted.org/packages/d0/7a/a1db0efe7c67b761d83be3d50e35ef26628ef56b3b8bc776d07412ee8b16/mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740", size = 78217 }, + { url = "https://files.pythonhosted.org/packages/b3/78/1ff8da7c859cd09704e2f500588d171eda9688fcf6f29e028ef261262a16/mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086", size = 77052 }, + { url = "https://files.pythonhosted.org/packages/ed/c7/cf16ace81fc9fbe54a75c914306252af26c6ea485366bb3b579bf6e3dbb8/mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276", size = 31277 }, + { url = "https://files.pythonhosted.org/packages/d2/0b/b3b1637dca9414451edf287fd91e667e7231d5ffd7498137fe011951fc0a/mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9", size = 31318 }, + { url = "https://files.pythonhosted.org/packages/dd/6c/c0f06040c58112ccbd0df989055ede98f7c1a1f392dc6a3fc63ec6c124ec/mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3", size = 30147 }, +] + +[[package]] +name = "monotonic" +version = "1.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ea/ca/8e91948b782ddfbd194f323e7e7d9ba12e5877addf04fb2bf8fca38e86ac/monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7", size = 7615 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/67/7e8406a29b6c45be7af7740456f7f37025f0506ae2e05fb9009a53946860/monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c", size = 8154 }, +] + +[[package]] +name = "more-itertools" +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/0d/ad6a82320cb8eba710fd0dceb0f678d5a1b58d67d03ae5be14874baa39e0/more-itertools-10.4.0.tar.gz", hash = "sha256:fe0e63c4ab068eac62410ab05cccca2dc71ec44ba8ef29916a0090df061cf923", size = 120755 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/0b/6a51175e1395774449fca317fb8861379b7a2d59be411b8cce3d19d6ce78/more_itertools-10.4.0-py3-none-any.whl", hash = "sha256:0f7d9f83a0a8dcfa8a2694a770590d98a67ea943e3d9f5298309a484758c4e27", size = 60935 }, +] + +[[package]] +name = "motor" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymongo", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/e3/f5244c84d7bdc149d99f9baa4313f197f7d14cfa1bfe1a6ac181e10cb3e2/motor-3.3.2.tar.gz", hash = "sha256:d2fc38de15f1c8058f389c1a44a4d4105c0405c48c061cd492a654496f7bc26a", size = 272583 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/9a/1a43a329dffbd1a631c52e64c1e9c036621afdfd7f42096ae4bf2de4132b/motor-3.3.2-py3-none-any.whl", hash = "sha256:6fe7e6f0c4f430b9e030b9d22549b732f7c2226af3ab71ecc309e4a1b7d19953", size = 70598 }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, +] + +[[package]] +name = "msal" +version = "1.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyjwt", extra = ["crypto"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/ce/45b9af8f43fbbf34d15162e1e39ce34b675c234c56638277cc05562b6dbf/msal-1.30.0.tar.gz", hash = "sha256:b4bf00850092e465157d814efa24a18f788284c9a479491024d62903085ea2fb", size = 142510 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/82/8f19334da43b7ef72d995587991a446f140346d76edb96a2c1a2689588e9/msal-1.30.0-py3-none-any.whl", hash = "sha256:423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de", size = 111760 }, +] + +[[package]] +name = "msal-extensions" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2d/38/ad49272d0a5af95f7a0cb64a79bbd75c9c187f3b789385a143d8d537a5eb/msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef", size = 22391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/69/314d887a01599669fb330da14e5c6ff5f138609e322812a942a74ef9b765/msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d", size = 19254 }, +] + +[[package]] +name = "multidict" +version = "6.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/79/722ca999a3a09a63b35aac12ec27dfa8e5bb3a38b0f857f7a1a209a88836/multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da", size = 59867 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/36/48097b96135017ed1b806c5ea27b6cdc2ed3a6861c5372b793563206c586/multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9", size = 50955 }, + { url = "https://files.pythonhosted.org/packages/d9/48/037440edb5d4a1c65e002925b2f24071d6c27754e6f4734f63037e3169d6/multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604", size = 30361 }, + { url = "https://files.pythonhosted.org/packages/a4/eb/d8e7693c9064554a1585698d1902839440c6c695b0f53c9a8be5d9d4a3b8/multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600", size = 30508 }, + { url = "https://files.pythonhosted.org/packages/f3/7d/fe7648d4b2f200f8854066ce6e56bf51889abfaf859814c62160dd0e32a9/multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c", size = 126318 }, + { url = 
"https://files.pythonhosted.org/packages/8d/ea/0230b6faa9a5bc10650fd50afcc4a86e6c37af2fe05bc679b74d79253732/multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5", size = 133998 }, + { url = "https://files.pythonhosted.org/packages/36/6d/d2f982fb485175727a193b4900b5f929d461e7aa87d6fb5a91a377fcc9c0/multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f", size = 129150 }, + { url = "https://files.pythonhosted.org/packages/33/62/2c9085e571318d51212a6914566fe41dd0e33d7f268f7e2f23dcd3f06c56/multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae", size = 124266 }, + { url = "https://files.pythonhosted.org/packages/ce/e2/88cdfeaf03eab3498f688a19b62ca704d371cd904cb74b682541ca7b20a7/multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182", size = 116637 }, + { url = "https://files.pythonhosted.org/packages/12/4d/99dfc36872dcc53956879f5da80a6505bbd29214cce90ce792a86e15fddf/multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf", size = 155908 }, + { url = "https://files.pythonhosted.org/packages/c2/5c/1e76b2c742cb9e0248d1e8c4ed420817879230c833fa27d890b5fd22290b/multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442", size = 147111 }, + { url = "https://files.pythonhosted.org/packages/bc/84/9579004267e1cc5968ef2ef8718dab9d8950d99354d85b739dd67b09c273/multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a", 
size = 160502 }, + { url = "https://files.pythonhosted.org/packages/11/b7/bef33e84e3722bc42531af020d7ae8c31235ce8846bacaa852b6484cf868/multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef", size = 156587 }, + { url = "https://files.pythonhosted.org/packages/26/ce/f745a2d6104e56f7fa0d7d0756bb9ed27b771dd7b8d9d7348cd7f0f7b9de/multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc", size = 151948 }, + { url = "https://files.pythonhosted.org/packages/f1/50/714da64281d2b2b3b4068e84f115e1ef3bd3ed3715b39503ff3c59e8d30d/multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319", size = 25734 }, + { url = "https://files.pythonhosted.org/packages/ef/3d/ba0dc18e96c5d83731c54129819d5892389e180f54ebb045c6124b2e8b87/multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8", size = 28182 }, + { url = "https://files.pythonhosted.org/packages/5f/da/b10ea65b850b54f44a6479177c6987f456bc2d38f8dc73009b78afcf0ede/multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba", size = 50815 }, + { url = "https://files.pythonhosted.org/packages/21/db/3403263f158b0bc7b0d4653766d71cb39498973f2042eead27b2e9758782/multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e", size = 30269 }, + { url = "https://files.pythonhosted.org/packages/02/c1/b15ecceb6ffa5081ed2ed450aea58d65b0e0358001f2b426705f9f41f4c2/multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd", size = 30500 }, + { url = 
"https://files.pythonhosted.org/packages/3f/e1/7fdd0f39565df3af87d6c2903fb66a7d529fbd0a8a066045d7a5b6ad1145/multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3", size = 130751 }, + { url = "https://files.pythonhosted.org/packages/76/bc/9f593f9e38c6c09bbf0344b56ad67dd53c69167937c2edadee9719a5e17d/multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf", size = 138185 }, + { url = "https://files.pythonhosted.org/packages/28/32/d7799a208701d537b92705f46c777ded812a6dc139c18d8ed599908f6b1c/multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29", size = 133585 }, + { url = "https://files.pythonhosted.org/packages/52/ec/be54a3ad110f386d5bd7a9a42a4ff36b3cd723ebe597f41073a73ffa16b8/multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed", size = 128684 }, + { url = "https://files.pythonhosted.org/packages/36/e1/a680eabeb71e25d4733276d917658dfa1cd3a99b1223625dbc247d266c98/multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733", size = 120994 }, + { url = "https://files.pythonhosted.org/packages/ef/08/08f4f44a8a43ea4cee13aa9cdbbf4a639af8db49310a0637ca389c4cf817/multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f", size = 159689 }, + { url = "https://files.pythonhosted.org/packages/aa/a9/46cdb4cb40bbd4b732169413f56b04a6553460b22bd914f9729c9ba63761/multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4", size = 150611 }, + { url = "https://files.pythonhosted.org/packages/e9/32/35668bb3e6ab2f12f4e4f7f4000f72f714882a94f904d4c3633fbd036753/multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1", size = 164444 }, + { url = "https://files.pythonhosted.org/packages/fa/10/f1388a91552af732d8ec48dab928abc209e732767e9e8f92d24c3544353c/multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc", size = 160158 }, + { url = "https://files.pythonhosted.org/packages/14/c3/f602601f1819983e018156e728e57b3f19726cb424b543667faab82f6939/multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e", size = 156072 }, + { url = "https://files.pythonhosted.org/packages/82/a6/0290af8487326108c0d03d14f8a0b8b1001d71e4494df5f96ab0c88c0b88/multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c", size = 25731 }, + { url = "https://files.pythonhosted.org/packages/88/aa/ea217cb18325aa05cb3e3111c19715f1e97c50a4a900cbc20e54648de5f5/multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea", size = 28176 }, + { url = "https://files.pythonhosted.org/packages/90/9c/7fda9c0defa09538c97b1f195394be82a1f53238536f70b32eb5399dfd4e/multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e", size = 49575 }, + { url = "https://files.pythonhosted.org/packages/be/21/d6ca80dd1b9b2c5605ff7475699a8ff5dc6ea958cd71fb2ff234afc13d79/multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b", size = 29638 }, + { url = 
"https://files.pythonhosted.org/packages/9c/18/9565f32c19d186168731e859692dfbc0e98f66a1dcf9e14d69c02a78b75a/multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5", size = 29874 }, + { url = "https://files.pythonhosted.org/packages/4e/4e/3815190e73e6ef101b5681c174c541bf972a1b064e926e56eea78d06e858/multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450", size = 129914 }, + { url = "https://files.pythonhosted.org/packages/0c/08/bb47f886457e2259aefc10044e45c8a1b62f0c27228557e17775869d0341/multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496", size = 134589 }, + { url = "https://files.pythonhosted.org/packages/d5/2f/952f79b5f0795cf4e34852fc5cf4dfda6166f63c06c798361215b69c131d/multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a", size = 133259 }, + { url = "https://files.pythonhosted.org/packages/24/1f/af976383b0b772dd351210af5b60ff9927e3abb2f4a103e93da19a957da0/multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226", size = 130779 }, + { url = "https://files.pythonhosted.org/packages/fc/b1/b0a7744be00b0f5045c7ed4e4a6b8ee6bde4672b2c620474712299df5979/multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271", size = 120125 }, + { url = "https://files.pythonhosted.org/packages/d0/bf/2a1d667acf11231cdf0b97a6cd9f30e7a5cf847037b5cf6da44884284bd0/multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb", size = 167095 }, + { url = "https://files.pythonhosted.org/packages/5e/e8/ad6ee74b1a2050d3bc78f566dabcc14c8bf89cbe87eecec866c011479815/multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef", size = 155823 }, + { url = "https://files.pythonhosted.org/packages/45/7c/06926bb91752c52abca3edbfefac1ea90d9d1bc00c84d0658c137589b920/multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24", size = 170233 }, + { url = "https://files.pythonhosted.org/packages/3c/29/3dd36cf6b9c5abba8b97bba84eb499a168ba59c3faec8829327b3887d123/multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6", size = 169035 }, + { url = "https://files.pythonhosted.org/packages/60/47/9a0f43470c70bbf6e148311f78ef5a3d4996b0226b6d295bdd50fdcfe387/multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda", size = 166229 }, + { url = "https://files.pythonhosted.org/packages/1d/23/c1b7ae7a0b8a3e08225284ef3ecbcf014b292a3ee821bc4ed2185fd4ce7d/multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5", size = 25840 }, + { url = "https://files.pythonhosted.org/packages/4a/68/66fceb758ad7a88993940dbdf3ac59911ba9dc46d7798bf6c8652f89f853/multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556", size = 27905 }, + { url = "https://files.pythonhosted.org/packages/fa/a2/17e1e23c6be0a916219c5292f509360c345b5fa6beeb50d743203c27532c/multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7", size = 9729 }, +] + +[[package]] +name = "mypy" +version = "1.11.1" 
+source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/9c/a4b3bda53823439cf395db8ecdda6229a83f9bf201714a68a15190bb2919/mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08", size = 3078369 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/ba/858cc9631c24a349c1c63814edc16448da7d6b8716b2c83a10aa20f5ee89/mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c", size = 10937885 }, + { url = "https://files.pythonhosted.org/packages/2d/88/2ae81f7489da8313d0f2043dd657ba847650b00a0fb8e07f40e716ed8c58/mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411", size = 10111978 }, + { url = "https://files.pythonhosted.org/packages/df/4b/d211d6036366f9ea5ee9fb949e80d133b4b8496cdde78c7119f518c49734/mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03", size = 12498441 }, + { url = "https://files.pythonhosted.org/packages/94/d2/973278d03ad11e006d71d4c858bfe45cf571ae061f3997911925c70a59f0/mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4", size = 13020595 }, + { url = 
"https://files.pythonhosted.org/packages/0b/c2/7f4285eda528883c5c34cb4b8d88080792967f7f7f24256ad8090d303702/mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58", size = 9568307 }, + { url = "https://files.pythonhosted.org/packages/0b/b1/62d8ce619493a5364dda4f410912aa12c27126926e8fb8393edca0664640/mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5", size = 10858723 }, + { url = "https://files.pythonhosted.org/packages/fe/aa/2ad15a318bc6a17b7f23e1641a624603949904f6131e09681f40340fb875/mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca", size = 10038078 }, + { url = "https://files.pythonhosted.org/packages/4d/7f/77feb389d91603f55b3c4e3e16ccf8752bce007ed73ca921e42c9a5dff12/mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de", size = 12420213 }, + { url = "https://files.pythonhosted.org/packages/bc/5b/907b4681f68e7ee2e2e88eed65c514cf6406b8f2f83b243ea79bd4eddb97/mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809", size = 12898278 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/2a83be637825d7432b8e6a51e45d02de4f463b6c7ec7164a45009a7cf477/mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72", size = 9564438 }, + { url = "https://files.pythonhosted.org/packages/3a/34/69638cee2e87303f19a0c35e80d42757e14d9aba328f272fdcdc0bf3c9b8/mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8", size = 10995789 }, + { url = 
"https://files.pythonhosted.org/packages/c4/3c/3e0611348fc53a4a7c80485959478b4f6eae706baf3b7c03cafa22639216/mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a", size = 10002696 }, + { url = "https://files.pythonhosted.org/packages/1c/21/a6b46c91b4c9d1918ee59c305f46850cde7cbea748635a352e7c3c8ed204/mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417", size = 12505772 }, + { url = "https://files.pythonhosted.org/packages/c4/55/07904d4c8f408e70308015edcbff067eaa77514475938a9dd81b063de2a8/mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e", size = 12954190 }, + { url = "https://files.pythonhosted.org/packages/1e/b7/3a50f318979c8c541428c2f1ee973cda813bcc89614de982dafdd0df2b3e/mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525", size = 9663138 }, + { url = "https://files.pythonhosted.org/packages/f8/d4/4960d0df55f30a7625d9c3c9414dfd42f779caabae137ef73ffaed0c97b9/mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54", size = 2619257 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] 
+name = "nbclient" +version = "0.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyter-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nbformat", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/d2/39bc36604f24bccd44d374ac34769bc58c53a1da5acd1e83f0165aa4940e/nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09", size = 62246 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/e8/00517a23d3eeaed0513e718fbc94aab26eaa1758f5690fc8578839791c79/nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f", size = 25318 }, +] + +[[package]] +name = "nbconvert" +version = "7.16.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "bleach", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "defusedxml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyter-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyterlab-pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "markupsafe", marker = "sys_platform == 'darwin' 
or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "mistune", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nbclient", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nbformat", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pandocfilters", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tinycss2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/e8/ba521a033b21132008e520c28ceb818f9f092da5f0261e94e509401b29f9/nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4", size = 854422 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/bb/bb5b6a515d1584aa2fd89965b11db6632e4bdc69495a52374bcc36e56cfa/nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3", size = 257388 }, +] + +[[package]] +name = "nbformat" +version = "5.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastjsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jupyter-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { 
name = "traitlets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454 }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, +] + +[[package]] +name = "networkx" +version = "3.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/e6/b164f94c869d6b2c605b5128b7b0cfe912795a87fc90e78533920001f3ec/networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9", size = 2126579 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2", size = 1702396 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "numpy" +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, + { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, + { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, + { url = 
"https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, + { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, + { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, + { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, + { url = 
"https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, + { url = 
"https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, +] + +[[package]] +name = "nvidia-cublas-cu12" +version = "12.1.3.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728", size = 410594774 }, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e", size = 14109015 }, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2", size = 23671734 }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40", size = 823596 }, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.1.0.70" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, +] + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.0.2.54" +source = { 
registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56", size = 121635161 }, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.2.106" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0", size = 56467784 }, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.4.5.107" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd", size = 124161928 }, +] + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.1.0.106" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c", size = 195958278 }, +] + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.20.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/bb/d09dda47c881f9ff504afd6f9ca4f502ded6d8fc2f572cacc5e39da91c28/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01", size = 176238458 }, + { url = "https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56", size = 176249402 }, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.6.20" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/b3/e456a1b2d499bb84bdc6670bfbcf41ff3bac58bd2fae6880d62834641558/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_aarch64.whl", hash = "sha256:84fb38465a5bc7c70cbc320cfd0963eb302ee25a5e939e9f512bbba55b6072fb", size = 19252608 }, + { url = "https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl", hash = "sha256:562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79", size = 19724950 }, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.1.105" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5", size = 99138 }, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/fa/fbf4001037904031639e6bfbfc02badfc7e12f137a8afa254df6c4c8a670/oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918", size = 177352 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/80/cab10959dc1faead58dc8384a781dfbf93cb4d33d50988f7a69f1b7c9bbe/oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", size = 151688 }, +] + +[[package]] +name = "ollama" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/4e/fc7ad9232c251b4885b1bf2e0f9ce35882e0f167a6ce7d3d15473dc07e7d/ollama-0.3.1.tar.gz", hash = "sha256:032572fb494a4fba200c65013fe937a65382c846b5f358d9e8918ecbc9ac44b5", size = 10033 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/25/c3442864bd77621809a208a483b0857f8d6444b7a67906b58b9dcddd1574/ollama-0.3.1-py3-none-any.whl", hash = "sha256:db50034c73d6350349bdfba19c3f0d54a3cea73eb97b35f9d7419b2fc7206454", size = 10028 }, +] + +[[package]] +name = "onnxruntime" +version = "1.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coloredlogs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "flatbuffers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sympy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/5d/7022b1506c68f1a29118130c19c320cd75129a6cae1445c3fe0093dd992c/onnxruntime-1.19.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6ce22a98dfec7b646ae305f52d0ce14a189a758b02ea501860ca719f4b0ae04b", size = 16775785 }, + { url = "https://files.pythonhosted.org/packages/64/98/8789df3b25caf732cf215a22ac80f2e45801394e8f5403c45eb24939fb21/onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19019c72873f26927aa322c54cf2bf7312b23451b27451f39b88f57016c94f8b", size = 11498421 }, + { url = "https://files.pythonhosted.org/packages/47/ff/8e3831e9a780be2235f6505e8cd9fb6acd7ba48d16dab7061281fc3b49e9/onnxruntime-1.19.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8eaa16df99171dc636e30108d15597aed8c4c2dd9dbfdd07cc464d57d73fb275", size = 13169357 }, + { url = "https://files.pythonhosted.org/packages/99/29/38324e534756280c68250ac178264fa33bc600523c236108c5bd0149a3ee/onnxruntime-1.19.0-cp310-cp310-win32.whl", hash = "sha256:0eb0f8dbe596fd0f4737fe511fdbb17603853a7d204c5b2ca38d3c7808fc556b", size = 9589434 }, + { url = "https://files.pythonhosted.org/packages/9d/e7/9eed7292c62c96f1acf201c3b039d9d867b54671cf2894d011619f58b0b5/onnxruntime-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:616092d54ba8023b7bc0a5f6d900a07a37cc1cfcc631873c15f8c1d6e9e184d4", size = 11083142 }, + { url = "https://files.pythonhosted.org/packages/80/16/fc200316725d04731d8ffc5d2105887a1e400d760b0c7fd464744335cd29/onnxruntime-1.19.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a2b53b3c287cd933e5eb597273926e899082d8c84ab96e1b34035764a1627e17", size = 16778356 }, + { url = 
"https://files.pythonhosted.org/packages/cc/3c/ff2ecf2a842822bc5e9758747bdfd4163c53af470421f07afd6cba1ced7d/onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e94984663963e74fbb468bde9ec6f19dcf890b594b35e249c4dc8789d08993c5", size = 11492628 }, + { url = "https://files.pythonhosted.org/packages/fa/ca/769da06e76b14a315a1effa5b01d906963379495cd82c00b5023be4c3e61/onnxruntime-1.19.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f379d1f050cfb55ce015d53727b78ee362febc065c38eed81512b22b757da73", size = 13172071 }, + { url = "https://files.pythonhosted.org/packages/75/7c/5a7e3fd98f9af3c43d6073c38afff8c18d201a72d1eba77c93dd230b8501/onnxruntime-1.19.0-cp311-cp311-win32.whl", hash = "sha256:4ccb48faea02503275ae7e79e351434fc43c294c4cb5c4d8bcb7479061396614", size = 9589924 }, + { url = "https://files.pythonhosted.org/packages/78/86/fd21288f9e4096d9c27bd0f221cb61719baa97d5e187549a9f0e84e386ae/onnxruntime-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:9cdc8d311289a84e77722de68bd22b8adfb94eea26f4be6f9e017350faac8b18", size = 11083172 }, + { url = "https://files.pythonhosted.org/packages/d1/3c/7cd126254658f0371fadf8651957387d7f743b1b85545e3b783a7f717215/onnxruntime-1.19.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1b59eaec1be9a8613c5fdeaafe67f73a062edce3ac03bbbdc9e2d98b58a30617", size = 16789643 }, + { url = "https://files.pythonhosted.org/packages/bf/6e/aae5420a45cbbcacef4c65f70067c11bed7cbb8fda12e0728f37d29746e5/onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be4144d014a4b25184e63ce7a463a2e7796e2f3df931fccc6a6aefa6f1365dc5", size = 11483896 }, + { url = "https://files.pythonhosted.org/packages/e6/0f/ad2ec6d490d9cb4ea82dd46382396827cb8ca9a469a56368fc7ef2fb52a4/onnxruntime-1.19.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d7e7d4ca7021ce7f29a66dbc6071addf2de5839135339bd855c6d9c2bba371", size = 13177713 }, 
+ { url = "https://files.pythonhosted.org/packages/de/4e/059cae46e48d183ac9b1d0be7ece1c5878711f4a31a206a9dcb34a89e3f5/onnxruntime-1.19.0-cp312-cp312-win32.whl", hash = "sha256:87f2c58b577a1fb31dc5d92b647ecc588fd5f1ea0c3ad4526f5f80a113357c8d", size = 9591661 }, + { url = "https://files.pythonhosted.org/packages/a0/ed/7ac157855cd2135ba894836ce4d027830b78d71832c9e658046e5b1b3d23/onnxruntime-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:8a1f50d49676d7b69566536ff039d9e4e95fc482a55673719f46528218ecbb94", size = 11084335 }, +] + +[[package]] +name = "openai" +version = "1.47.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "distro", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jiter", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "sniffio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/80/4c366e9113527894584a6404f105d134fae83a314dc04a6a99bd0e2459bb/openai-1.47.0.tar.gz", hash = "sha256:6e14d6f77c8cf546646afcd87a2ef752505b3710d2564a2e433e17307dfa86a0", size = 297886 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/3c/28d7cf1a3292c93e5def8acd89535ba380cb8dd888c26cdbfe420249e143/openai-1.47.0-py3-none-any.whl", hash = 
"sha256:9ccc8737dfa791f7bd903db4758c176b8544a8cd89d3a3d2add3cea02a34c3a0", size = 375576 }, +] + +[[package]] +name = "openapi-core" +version = "0.19.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "isodate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jsonschema-path", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "more-itertools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openapi-schema-validator", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openapi-spec-validator", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "parse", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "werkzeug", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/34/26eef886b9a9470952ab248b961fea29e23c9fd5e5083371c1f7f0aa4443/openapi_core-0.19.3.tar.gz", hash = "sha256:5db6479ecccf76c52422961dc42b411b7625a802087d847251fdd66f0392b095", size = 109026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/08/7ed984041e003113c648583c6f3ee5a88510f8d69901d64aa08acec5cc67/openapi_core-0.19.3-py3-none-any.whl", hash = "sha256:88c8be49b083a39923ada4c1269919ba119ab617c951f901757a054a483988b0", size = 103690 }, +] + +[[package]] +name = "openapi-schema-validator" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"jsonschema-specifications", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rfc3339-validator", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/b2/7d5bdf2b26b6a95ebf4fbec294acaf4306c713f3a47c2453962511110248/openapi_schema_validator-0.6.2.tar.gz", hash = "sha256:11a95c9c9017912964e3e5f2545a5b11c3814880681fcacfb73b1759bb4f2804", size = 11860 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/dc/9aefae8891454130968ff079ece851d1ae9ccf6fb7965761f47c50c04853/openapi_schema_validator-0.6.2-py3-none-any.whl", hash = "sha256:c4887c1347c669eb7cded9090f4438b710845cd0f90d1fb9e1b3303fb37339f8", size = 8750 }, +] + +[[package]] +name = "openapi-spec-validator" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jsonschema-path", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "lazy-object-proxy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openapi-schema-validator", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/fe/21954ff978239dc29ebb313f5c87eeb4ec929b694b9667323086730998e2/openapi_spec_validator-0.7.1.tar.gz", hash = "sha256:8577b85a8268685da6f8aa30990b83b7960d4d1117e901d451b5d572605e5ec7", size = 37985 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/4d/e744fff95aaf3aeafc968d5ba7297c8cda0d1ecb8e3acd21b25adae4d835/openapi_spec_validator-0.7.1-py3-none-any.whl", hash = "sha256:3c81825043f24ccbcd2f4b149b11e8231abce5ba84f37065e14ec947d8f4e959", size = 38998 }, +] + +[[package]] +name = 
"opentelemetry-api" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "importlib-metadata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/d4/e9a0ddef6eed086c96e8265d864a46da099611b7be153b0cfb63fd47e1b4/opentelemetry_api-1.26.0.tar.gz", hash = "sha256:2bd639e4bed5b18486fef0b5a520aaffde5a18fc225e808a1ac4df363f43a1ce", size = 60904 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/a7/6322d1d7a1fb926e8b99208c27730f21217da2f1e0e11dab48a78a0427a4/opentelemetry_api-1.26.0-py3-none-any.whl", hash = "sha256:7d7ea33adf2ceda2dd680b18b1677e4152000b37ca76e679da71ff103b943064", size = 61533 }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/84/cd/ed9eaa1d80facb6609d02af6c393b02ce3797a15742361be4859db6fdc17/opentelemetry_exporter_otlp_proto_common-1.26.0.tar.gz", hash = "sha256:bdbe50e2e22a1c71acaa0c8ba6efaadd58882e5a5978737a44a4c4b10d304c92", size = 17815 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/2f/0f7e0a73fd901c9abc6ea680d7f19a803dac830c450f21e1123d3a3ec488/opentelemetry_exporter_otlp_proto_common-1.26.0-py3-none-any.whl", hash = "sha256:ee4d8f8891a1b9c372abf8d109409e5b81947cf66423fd998e56880057afbc71", size = 17837 }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "googleapis-common-protos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-exporter-otlp-proto-common", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-proto", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-sdk", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/23/cac89aca97ecb8f7498a875dc2ac89224b4f3345bcb8ffff643b59886196/opentelemetry_exporter_otlp_proto_grpc-1.26.0.tar.gz", hash = "sha256:a65b67a9a6b06ba1ec406114568e21afe88c1cdb29c464f2507d529eb906d8ae", size = 25239 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/0c/e4473692fec8076008c7926dfcef7223fc6d2785f04ad9d8402347a4eba9/opentelemetry_exporter_otlp_proto_grpc-1.26.0-py3-none-any.whl", hash = "sha256:e2be5eff72ebcb010675b818e8d7c2e7d61ec451755b8de67a140bc49b9b0280", size = 18228 }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.47b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ab/9d/de2726729dbe5d210683245315ed5a20bf90465d1cc5e7f9cb0bee6673a6/opentelemetry_instrumentation-0.47b0.tar.gz", hash = "sha256:96f9885e450c35e3f16a4f33145f2ebf620aea910c9fd74a392bbc0f807a350f", size = 24516 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/6a/be31a84ddd13e9018fcca6885e4710f227eb0fd06eda1896da67287faa2e/opentelemetry_instrumentation-0.47b0-py3-none-any.whl", hash = "sha256:88974ee52b1db08fc298334b51c19d47e53099c33740e48c4f084bd1afd052d5", size = 29218 }, +] + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.47b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-instrumentation", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/59/a5/895c3810f27cdd3bdb02320df3489d2d33f158970d8447755deb7fc3fef7/opentelemetry_instrumentation_asgi-0.47b0.tar.gz", hash = "sha256:e78b7822c1bca0511e5e9610ec484b8994a81670375e570c76f06f69af7c506a", size = 23398 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/d9/c74cb6d69589cc97d856cb3f427dfcef37ec16f9564586290c9c075d9020/opentelemetry_instrumentation_asgi-0.47b0-py3-none-any.whl", hash = "sha256:b798dc4957b3edc9dfecb47a4c05809036a4b762234c5071212fda39ead80ade", size = 15946 }, +] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.47b0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-instrumentation", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-instrumentation-asgi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-util-http", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/8f/c68dbef4be5db9330b0e9f492277b0dcdc8870d86de0c749b537406c590a/opentelemetry_instrumentation_fastapi-0.47b0.tar.gz", hash = "sha256:0c7c10b5d971e99a420678ffd16c5b1ea4f0db3b31b62faf305fbb03b4ebee36", size = 17332 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/29/a97842d6dfa679bf0f3624ce1ea3458eb185befd536cafe580daa9ab68ae/opentelemetry_instrumentation_fastapi-0.47b0-py3-none-any.whl", hash = "sha256:5ac28dd401160b02e4f544a85a9e4f61a8cbe5b077ea0379d411615376a2bd21", size = 11715 }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/06/9505ef04e527fa711ebffb47f3f56cac6015405953ff688fc349d170fb9c/opentelemetry_proto-1.26.0.tar.gz", hash = "sha256:c5c18796c0cab3751fc3b98dee53855835e90c0422924b484432ac852d93dc1e", size = 34749 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/f4/66a3892eea913cded9bac0fdd3fb1a412fa2da8eb50014ec87a52648444a/opentelemetry_proto-1.26.0-py3-none-any.whl", hash 
= "sha256:6c4d7b4d4d9c88543bcf8c28ae3f8f0448a753dc291c18c5390444c90b76a725", size = 52466 }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/85/8ca0d5ebfe708287b091dffcd15553b74bbfe4532f8dd42662b78b2e0cab/opentelemetry_sdk-1.26.0.tar.gz", hash = "sha256:c90d2868f8805619535c05562d699e2f4fb1f00dbd55a86dcefca4da6fa02f85", size = 143139 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/f1/a9b550d0f9c049653dd2eab45cecf8fe4baa9795ed143d87834056ffabaf/opentelemetry_sdk-1.26.0-py3-none-any.whl", hash = "sha256:feb5056a84a88670c041ea0ded9921fca559efec03905dddeb3885525e0af897", size = 109475 }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.47b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/85/edef14d10ad00ddd9fffb20e4d3d938f4c5c1247e11a175066fe2b4a72f8/opentelemetry_semantic_conventions-0.47b0.tar.gz", hash = "sha256:a8d57999bbe3495ffd4d510de26a97dadc1dace53e0275001b2c1b2f67992a7e", size = 83994 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/00/c2/ca5cef8e4cd8eec5a95deed95ec3f6005e499fd9d17ca08731ced03a6921/opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl", hash = "sha256:4ff9d595b85a59c1c1413f02bba320ce7ea6bf9e2ead2b0913c4395c7bbc1063", size = 138027 }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.47b0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/b5/fb15aafe7391b6a36f5cd9bcb9f6c3efaeb87a0626e4d2dfef12f66ebf3e/opentelemetry_util_http-0.47b0.tar.gz", hash = "sha256:352a07664c18eef827eb8ddcbd64c64a7284a39dd1655e2f16f577eb046ccb32", size = 7863 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/7e/98749e14a4e3f4db8bc016e6b42aba40e4d934baeb8767b8658a99d0dfac/opentelemetry_util_http-0.47b0-py3-none-any.whl", hash = "sha256:3d3215e09c4a723b12da6d0233a31395aeb2bb33a64d7b15a1500690ba250f19", size = 6946 }, +] + +[[package]] +name = "orjson" +version = "3.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/03/821c8197d0515e46ea19439f5c5d5fd9a9889f76800613cfac947b5d7845/orjson-3.10.7.tar.gz", hash = "sha256:75ef0640403f945f3a1f9f6400686560dbfb0fb5b16589ad62cd477043c4eee3", size = 5056450 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/12/60931cf808b9334f26210ab496442f4a7a3d66e29d1cf12e0a01857e756f/orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/fe/0e/efbd0a2d25f8e82b230eb20b6b8424be6dd95b6811b669be9af16234b6db/orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac", size = 148124 }, + { url = 
"https://files.pythonhosted.org/packages/dd/47/1ddff6e23fe5f4aeaaed996a3cde422b3eaac4558c03751723e106184c68/orjson-3.10.7-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf6ba8ebc8ef5792e2337fb0419f8009729335bb400ece005606336b7fd7bab7", size = 147277 }, + { url = "https://files.pythonhosted.org/packages/04/da/d03d72b54bdd60d05de372114abfbd9f05050946895140c6ff5f27ab8f49/orjson-3.10.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7cf6222b29fbda9e3a472b41e6a5538b48f2c8f99261eecd60aafbdb60690c", size = 152955 }, + { url = "https://files.pythonhosted.org/packages/7f/7e/ef8522dbba112af6cc52227dcc746dd3447c7d53ea8cea35740239b547ee/orjson-3.10.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de817e2f5fc75a9e7dd350c4b0f54617b280e26d1631811a43e7e968fa71e3e9", size = 163955 }, + { url = "https://files.pythonhosted.org/packages/b6/bc/fbd345d771a73cacc5b0e774d034cd081590b336754c511f4ead9fdc4cf1/orjson-3.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:348bdd16b32556cf8d7257b17cf2bdb7ab7976af4af41ebe79f9796c218f7e91", size = 141896 }, + { url = "https://files.pythonhosted.org/packages/82/0a/1f09c12d15b1e83156b7f3f621561d38650fe5b8f39f38f04a64de1a87fc/orjson-3.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:479fd0844ddc3ca77e0fd99644c7fe2de8e8be1efcd57705b5c92e5186e8a250", size = 170166 }, + { url = "https://files.pythonhosted.org/packages/a6/d8/eee30caba21a8d6a9df06d2519bb0ecd0adbcd57f2e79d360de5570031cf/orjson-3.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fdf5197a21dd660cf19dfd2a3ce79574588f8f5e2dbf21bda9ee2d2b46924d84", size = 167804 }, + { url = "https://files.pythonhosted.org/packages/44/fe/d1d89d3f15e343511417195f6ccd2bdeb7ebc5a48a882a79ab3bbcdf5fc7/orjson-3.10.7-cp310-none-win32.whl", hash = "sha256:d374d36726746c81a49f3ff8daa2898dccab6596864ebe43d50733275c629175", size = 143010 }, + { url = 
"https://files.pythonhosted.org/packages/88/8c/0e7b8d5a523927774758ac4ce2de4d8ca5dda569955ba3aeb5e208344eda/orjson-3.10.7-cp310-none-win_amd64.whl", hash = "sha256:cb61938aec8b0ffb6eef484d480188a1777e67b05d58e41b435c74b9d84e0b9c", size = 137306 }, + { url = "https://files.pythonhosted.org/packages/89/c9/dd286c97c2f478d43839bd859ca4d9820e2177d4e07a64c516dc3e018062/orjson-3.10.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7db8539039698ddfb9a524b4dd19508256107568cdad24f3682d5773e60504a2", size = 251312 }, + { url = "https://files.pythonhosted.org/packages/b9/72/d90bd11e83a0e9623b3803b079478a93de8ec4316c98fa66110d594de5fa/orjson-3.10.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:480f455222cb7a1dea35c57a67578848537d2602b46c464472c995297117fa09", size = 148125 }, + { url = "https://files.pythonhosted.org/packages/9d/b6/ed61e87f327a4cbb2075ed0716e32ba68cb029aa654a68c3eb27803050d8/orjson-3.10.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a9c9b168b3a19e37fe2778c0003359f07822c90fdff8f98d9d2a91b3144d8e0", size = 147278 }, + { url = "https://files.pythonhosted.org/packages/66/9f/e6a11b5d1ad11e9dc869d938707ef93ff5ed20b53d6cda8b5e2ac532a9d2/orjson-3.10.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8de062de550f63185e4c1c54151bdddfc5625e37daf0aa1e75d2a1293e3b7d9a", size = 152954 }, + { url = "https://files.pythonhosted.org/packages/92/ee/702d5e8ccd42dc2b9d1043f22daa1ba75165616aa021dc19fb0c5a726ce8/orjson-3.10.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b0dd04483499d1de9c8f6203f8975caf17a6000b9c0c54630cef02e44ee624e", size = 163953 }, + { url = "https://files.pythonhosted.org/packages/d3/cb/55205f3f1ee6ba80c0a9a18ca07423003ca8de99192b18be30f1f31b4cdd/orjson-3.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b58d3795dafa334fc8fd46f7c5dc013e6ad06fd5b9a4cc98cb1456e7d3558bd6", size = 141895 }, + { url = "https://files.pythonhosted.org/packages/bb/ab/1185e472f15c00d37d09c395e478803ed0eae7a3a3d055a5f3885e1ea136/orjson-3.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33cfb96c24034a878d83d1a9415799a73dc77480e6c40417e5dda0710d559ee6", size = 170169 }, + { url = "https://files.pythonhosted.org/packages/53/b9/10abe9089bdb08cd4218cc45eb7abfd787c82cf301cecbfe7f141542d7f4/orjson-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e724cebe1fadc2b23c6f7415bad5ee6239e00a69f30ee423f319c6af70e2a5c0", size = 167808 }, + { url = "https://files.pythonhosted.org/packages/8a/ad/26b40ccef119dcb0f4a39745ffd7d2d319152c1a52859b1ebbd114eca19c/orjson-3.10.7-cp311-none-win32.whl", hash = "sha256:82763b46053727a7168d29c772ed5c870fdae2f61aa8a25994c7984a19b1021f", size = 143010 }, + { url = "https://files.pythonhosted.org/packages/e7/63/5f4101e4895b78ada568f4cf8f870dd594139ca2e75e654e373da78b03b0/orjson-3.10.7-cp311-none-win_amd64.whl", hash = "sha256:eb8d384a24778abf29afb8e41d68fdd9a156cf6e5390c04cc07bbc24b89e98b5", size = 137307 }, + { url = "https://files.pythonhosted.org/packages/14/7c/b4ecc2069210489696a36e42862ccccef7e49e1454a3422030ef52881b01/orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f", size = 251409 }, + { url = "https://files.pythonhosted.org/packages/60/84/e495edb919ef0c98d054a9b6d05f2700fdeba3886edd58f1c4dfb25d514a/orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3", size = 147913 }, + { url = "https://files.pythonhosted.org/packages/c5/27/e40bc7d79c4afb7e9264f22320c285d06d2c9574c9c682ba0f1be3012833/orjson-3.10.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:bdbb61dcc365dd9be94e8f7df91975edc9364d6a78c8f7adb69c1cdff318ec93", size = 147390 }, + { url = "https://files.pythonhosted.org/packages/30/be/fd646fb1a461de4958a6eacf4ecf064b8d5479c023e0e71cc89b28fa91ac/orjson-3.10.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b48b3db6bb6e0a08fa8c83b47bc169623f801e5cc4f24442ab2b6617da3b5313", size = 152973 }, + { url = "https://files.pythonhosted.org/packages/b1/00/414f8d4bc5ec3447e27b5c26b4e996e4ef08594d599e79b3648f64da060c/orjson-3.10.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23820a1563a1d386414fef15c249040042b8e5d07b40ab3fe3efbfbbcbcb8864", size = 164039 }, + { url = "https://files.pythonhosted.org/packages/a0/6b/34e6904ac99df811a06e42d8461d47b6e0c9b86e2fe7ee84934df6e35f0d/orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09", size = 142035 }, + { url = "https://files.pythonhosted.org/packages/17/7e/254189d9b6df89660f65aec878d5eeaa5b1ae371bd2c458f85940445d36f/orjson-3.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d352ee8ac1926d6193f602cbe36b1643bbd1bbcb25e3c1a657a4390f3000c9a5", size = 169941 }, + { url = "https://files.pythonhosted.org/packages/02/1a/d11805670c29d3a1b29fc4bd048dc90b094784779690592efe8c9f71249a/orjson-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2d9f990623f15c0ae7ac608103c33dfe1486d2ed974ac3f40b693bad1a22a7b", size = 167994 }, + { url = "https://files.pythonhosted.org/packages/20/5f/03d89b007f9d6733dc11bc35d64812101c85d6c4e9c53af9fa7e7689cb11/orjson-3.10.7-cp312-none-win32.whl", hash = "sha256:7c4c17f8157bd520cdb7195f75ddbd31671997cbe10aee559c2d613592e7d7eb", size = 143130 }, + { url = "https://files.pythonhosted.org/packages/c6/9d/9b9fb6c60b8a0e04031ba85414915e19ecea484ebb625402d968ea45b8d5/orjson-3.10.7-cp312-none-win_amd64.whl", hash = 
"sha256:1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1", size = 137326 }, + { url = "https://files.pythonhosted.org/packages/15/05/121af8a87513c56745d01ad7cf215c30d08356da9ad882ebe2ba890824cd/orjson-3.10.7-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:77d325ed866876c0fa6492598ec01fe30e803272a6e8b10e992288b009cbe149", size = 251331 }, + { url = "https://files.pythonhosted.org/packages/73/7f/8d6ccd64a6f8bdbfe6c9be7c58aeb8094aa52a01fbbb2cda42ff7e312bd7/orjson-3.10.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ea2c232deedcb605e853ae1db2cc94f7390ac776743b699b50b071b02bea6fe", size = 142012 }, + { url = "https://files.pythonhosted.org/packages/04/65/f2a03fd1d4f0308f01d372e004c049f7eb9bc5676763a15f20f383fa9c01/orjson-3.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3dcfbede6737fdbef3ce9c37af3fb6142e8e1ebc10336daa05872bfb1d87839c", size = 169920 }, + { url = "https://files.pythonhosted.org/packages/e2/1c/3ef8d83d7c6a619ad3d69a4d5318591b4ce5862e6eda7c26bbe8208652ca/orjson-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11748c135f281203f4ee695b7f80bb1358a82a63905f9f0b794769483ea854ad", size = 167916 }, + { url = "https://files.pythonhosted.org/packages/f2/0d/820a640e5a7dfbe525e789c70871ebb82aff73b0c7bf80082653f86b9431/orjson-3.10.7-cp313-none-win32.whl", hash = "sha256:a7e19150d215c7a13f39eb787d84db274298d3f83d85463e61d277bbd7f401d2", size = 143089 }, + { url = "https://files.pythonhosted.org/packages/1a/72/a424db9116c7cad2950a8f9e4aeb655a7b57de988eb015acd0fcd1b4609b/orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024", size = 137081 }, +] + +[[package]] +name = "overrides" +version = "7.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 }, +] + +[[package]] +name = "packaging" +version = "24.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/65/50db4dda066951078f0a96cf12f4b9ada6e4b811516bf0262c0f4f7064d4/packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", size = 148788 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124", size = 53985 }, +] + +[[package]] +name = "pandas" +version = "2.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytz", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tzdata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/88/d9/ecf715f34c73ccb1d8ceb82fc01cd1028a65a5f6dbc57bfa6ea155119058/pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54", size = 4398391 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/2d/39600d073ea70b9cafdc51fab91d69c72b49dd92810f24cb5ac6631f387f/pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce", size = 12551798 }, + { url = "https://files.pythonhosted.org/packages/fd/4b/0cd38e68ab690b9df8ef90cba625bf3f93b82d1c719703b8e1b333b2c72d/pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238", size = 11287392 }, + { url = "https://files.pythonhosted.org/packages/01/c6/d3d2612aea9b9f28e79a30b864835dad8f542dcf474eee09afeee5d15d75/pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08", size = 15634823 }, + { url = "https://files.pythonhosted.org/packages/89/1b/12521efcbc6058e2673583bb096c2b5046a9df39bd73eca392c1efed24e5/pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0", size = 13032214 }, + { url = "https://files.pythonhosted.org/packages/e4/d7/303dba73f1c3a9ef067d23e5afbb6175aa25e8121be79be354dcc740921a/pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51", size = 16278302 }, + { url = "https://files.pythonhosted.org/packages/ba/df/8ff7c5ed1cc4da8c6ab674dc8e4860a4310c3880df1283e01bac27a4333d/pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99", size = 13892866 }, + { url = "https://files.pythonhosted.org/packages/69/a6/81d5dc9a612cf0c1810c2ebc4f2afddb900382276522b18d128213faeae3/pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772", size = 11621592 }, + { url = 
"https://files.pythonhosted.org/packages/1b/70/61704497903d43043e288017cb2b82155c0d41e15f5c17807920877b45c2/pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288", size = 12574808 }, + { url = "https://files.pythonhosted.org/packages/16/c6/75231fd47afd6b3f89011e7077f1a3958441264aca7ae9ff596e3276a5d0/pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151", size = 11304876 }, + { url = "https://files.pythonhosted.org/packages/97/2d/7b54f80b93379ff94afb3bd9b0cd1d17b48183a0d6f98045bc01ce1e06a7/pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b", size = 15602548 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4d82be566f069d7a9a702dcdf6f9106df0e0b042e738043c0cc7ddd7e3f6/pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee", size = 13031332 }, + { url = "https://files.pythonhosted.org/packages/92/a2/b79c48f530673567805e607712b29814b47dcaf0d167e87145eb4b0118c6/pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db", size = 16286054 }, + { url = "https://files.pythonhosted.org/packages/40/c7/47e94907f1d8fdb4868d61bd6c93d57b3784a964d52691b77ebfdb062842/pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1", size = 13879507 }, + { url = "https://files.pythonhosted.org/packages/ab/63/966db1321a0ad55df1d1fe51505d2cdae191b84c907974873817b0a6e849/pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24", size = 11634249 }, + { url = 
"https://files.pythonhosted.org/packages/dd/49/de869130028fb8d90e25da3b7d8fb13e40f5afa4c4af1781583eb1ff3839/pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef", size = 12500886 }, + { url = "https://files.pythonhosted.org/packages/db/7c/9a60add21b96140e22465d9adf09832feade45235cd22f4cb1668a25e443/pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce", size = 11340320 }, + { url = "https://files.pythonhosted.org/packages/b0/85/f95b5f322e1ae13b7ed7e97bd999160fa003424711ab4dc8344b8772c270/pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad", size = 15204346 }, + { url = "https://files.pythonhosted.org/packages/40/10/79e52ef01dfeb1c1ca47a109a01a248754ebe990e159a844ece12914de83/pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad", size = 12733396 }, + { url = "https://files.pythonhosted.org/packages/35/9d/208febf8c4eb5c1d9ea3314d52d8bd415fd0ef0dd66bb24cc5bdbc8fa71a/pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76", size = 15858913 }, + { url = "https://files.pythonhosted.org/packages/99/d1/2d9bd05def7a9e08a92ec929b5a4c8d5556ec76fae22b0fa486cbf33ea63/pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32", size = 13417786 }, + { url = "https://files.pythonhosted.org/packages/22/a5/a0b255295406ed54269814bc93723cfd1a0da63fb9aaf99e1364f07923e5/pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23", size = 11498828 }, +] + +[[package]] +name = "pandocfilters" +version = "1.5.1" +source = { registry 
= "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663 }, +] + +[[package]] +name = "parse" +version = "1.20.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/78/d9b09ba24bb36ef8b83b71be547e118d46214735b6dfb39e4bfde0e9b9dd/parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce", size = 29391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/31/ba45bf0b2aa7898d81cbbfac0e88c267befb59ad91a19e36e1bc5578ddb1/parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558", size = 20126 }, +] + +[[package]] +name = "parso" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650 }, +] + +[[package]] +name = "pathable" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/9d/ed/e0e29300253b61dea3b7ec3a31f5d061d577c2a6fd1e35c5cfd0e6f2cd6d/pathable-0.4.3.tar.gz", hash = "sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab", size = 8679 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/0a/acfb251ba01009d3053f04f4661e96abf9d485266b04a0a4deebc702d9cb/pathable-0.4.3-py3-none-any.whl", hash = "sha256:cdd7b1f9d7d5c8b8d3315dbf5a86b2596053ae845f056f57d97c0eefff84da14", size = 9587 }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, +] + +[[package]] +name = "pillow" +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271 }, + { url = 
"https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658 }, + { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075 }, + { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808 }, + { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290 }, + { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163 }, + { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100 }, + { url = "https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880 }, + { url = 
"https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218 }, + { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487 }, + { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219 }, + { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265 }, + { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655 }, + { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304 }, + { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804 }, + { url = 
"https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126 }, + { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541 }, + { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616 }, + { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802 }, + { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213 }, + { url = "https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498 }, + { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219 }, + { url = 
"https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350 }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980 }, + { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799 }, + { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973 }, + { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054 }, + { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484 }, + { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375 }, + { url = 
"https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773 }, + { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690 }, + { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951 }, + { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427 }, + { url = "https://files.pythonhosted.org/packages/c3/00/706cebe7c2c12a6318aabe5d354836f54adff7156fd9e1bd6c89f4ba0e98/pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3", size = 3525685 }, + { url = "https://files.pythonhosted.org/packages/cf/76/f658cbfa49405e5ecbfb9ba42d07074ad9792031267e782d409fd8fe7c69/pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb", size = 3374883 }, + { url = "https://files.pythonhosted.org/packages/46/2b/99c28c4379a85e65378211971c0b430d9c7234b1ec4d59b2668f6299e011/pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70", size = 4339837 }, + { url = 
"https://files.pythonhosted.org/packages/f1/74/b1ec314f624c0c43711fdf0d8076f82d9d802afd58f1d62c2a86878e8615/pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be", size = 4455562 }, + { url = "https://files.pythonhosted.org/packages/4a/2a/4b04157cb7b9c74372fa867096a1607e6fedad93a44deeff553ccd307868/pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0", size = 4366761 }, + { url = "https://files.pythonhosted.org/packages/ac/7b/8f1d815c1a6a268fe90481232c98dd0e5fa8c75e341a75f060037bd5ceae/pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc", size = 4536767 }, + { url = "https://files.pythonhosted.org/packages/e5/77/05fa64d1f45d12c22c314e7b97398ffb28ef2813a485465017b7978b3ce7/pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a", size = 4477989 }, + { url = "https://files.pythonhosted.org/packages/12/63/b0397cfc2caae05c3fb2f4ed1b4fc4fc878f0243510a7a6034ca59726494/pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309", size = 4610255 }, + { url = "https://files.pythonhosted.org/packages/7b/f9/cfaa5082ca9bc4a6de66ffe1c12c2d90bf09c309a5f52b27759a596900e7/pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060", size = 2235603 }, + { url = "https://files.pythonhosted.org/packages/01/6a/30ff0eef6e0c0e71e55ded56a38d4859bf9d3634a94a88743897b5f96936/pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea", size = 2554972 }, + { url = 
"https://files.pythonhosted.org/packages/48/2c/2e0a52890f269435eee38b21c8218e102c621fe8d8df8b9dd06fabf879ba/pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d", size = 2243375 }, + { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889 }, + { url = "https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160 }, + { url = "https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020 }, + { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539 }, + { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125 }, + { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373 }, + { url = 
"https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661 }, +] + +[[package]] +name = "pinecone-client" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pinecone-plugin-inference", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pinecone-plugin-interface", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/739fe0a4a173658d541206ec7fdb0cc4c9ddc364de216af668b988bf0868/pinecone_client-5.0.1.tar.gz", hash = "sha256:11c33ff5d1c38a6ce69e69fe532c0f22f312fb28d761bb30b3767816d3181d64", size = 122207 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/d0/c64336b8f76e63296d04b885c545c0872ff070e6b2bc725dd0ff3ae681dc/pinecone_client-5.0.1-py3-none-any.whl", hash = "sha256:c8f7835e1045ba84e295f217a8e85573ffb80b41501bbc1af6d92c9631c567a7", size = 244818 }, +] + +[[package]] +name = "pinecone-plugin-inference" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pinecone-plugin-interface", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fe/40/7b2a88e68ede294dc293c6196a71f3d6583d403320a2008153b095cd3e39/pinecone_plugin_inference-1.0.3.tar.gz", hash = "sha256:c6519ba730123713a181c010f0db9d6449d11de451b8e79bec4efd662b096f41", size = 54372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/b7/0d57cad06545ac8fbb7a362dddaff01b0ecfe6e47c135345e94b3d8ab2ca/pinecone_plugin_inference-1.0.3-py3-none-any.whl", hash = "sha256:bbdfe5dba99a87374d9e3315b62b8e1bbca52d5fe069a64cd6b212efbc8b9afd", size = 117566 }, +] + +[[package]] +name = "pinecone-plugin-interface" +version = "0.0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/fb/e8a4063264953ead9e2b24d9b390152c60f042c951c47f4592e9996e57ff/pinecone_plugin_interface-0.0.7.tar.gz", hash = "sha256:b8e6675e41847333aa13923cc44daa3f85676d7157324682dc1640588a982846", size = 3370 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/1d/a21fdfcd6d022cb64cef5c2a29ee6691c6c103c4566b41646b080b7536a5/pinecone_plugin_interface-0.0.7-py3-none-any.whl", hash = "sha256:875857ad9c9fc8bbc074dbe780d187a2afd21f5bfe0f3b08601924a61ef1bba8", size = 6249 }, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/52/0763d1d976d5c262df53ddda8d8d4719eedf9594d046f117c25a27261a19/platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3", size = 20916 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/13/2aa1f0e1364feb2c9ef45302f387ac0bd81484e9c9a4c5688a322fbdfd08/platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee", size = 18146 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "portalocker" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or (platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423 }, +] + +[[package]] +name = "posthog" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backoff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "monotonic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "six", marker = "sys_platform == 
'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/c8/8a7308d5355fedfc400098a75fd191cf615b55aa22ef2a937995326e6f5e/posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef", size = 38142 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/5f/24cb22118db0e11703b6b80ef9f982eadde21eb585c3a769719e48dce893/posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76", size = 41300 }, +] + +[[package]] +name = "prance" +version = "23.6.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "chardet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ruamel-yaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/f0/bcb5ffc8b7ab8e3d02dbef3bd945cf8fd6e12c146774f900659406b9fce1/prance-23.6.21.0.tar.gz", hash = "sha256:d8c15f8ac34019751cc4945f866d8d964d7888016d10de3592e339567177cabe", size = 2798776 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/db/4fb4901ee61274d0ab97746461fc5f2637e5d73aa73f34ee28e941a699a1/prance-23.6.21.0-py3-none-any.whl", hash = "sha256:6a4276fa07ed9f22feda4331097d7503c4adc3097e46ffae97425f2c1026bd9f", size = 36279 }, +] + +[[package]] +name = "pre-commit" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv", marker = "sys_platform == 'darwin' or sys_platform 
== 'linux' or sys_platform == 'win32'" }, + { name = "identify", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nodeenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "virtualenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/10/97ee2fa54dff1e9da9badbc5e35d0bbaef0776271ea5907eccf64140f72f/pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af", size = 177815 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/92/caae8c86e94681b42c246f0bca35c059a2f0529e5b92619f6aba4cf7e7b6/pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f", size = 204643 }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/47/6d/0279b119dafc74c1220420028d490c4399b790fc1256998666e3a341879f/prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360", size = 425859 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/23/22750c4b768f09386d1c3cc4337953e8936f48a888fa6dddfb669b2c9088/prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10", size = 386411 }, +] + +[[package]] +name = "proto-plus" +version = "1.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3e/fc/e9a65cd52c1330d8d23af6013651a0bc50b6d76bcbdf91fae7cd19c68f29/proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445", size = 55942 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/6f/db31f0711c0402aa477257205ce7d29e86a75cb52cd19f7afb585f75cda0/proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12", size = 50080 }, +] + +[[package]] +name = "protobuf" +version = "4.25.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/ab/cb61a4b87b2e7e6c312dce33602bd5884797fd054e0e53205f1c27cf0f66/protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d", size = 380283 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/43/27b48d9040763b78177d3083e16c70dba6e3c3ee2af64b659f6332c2b06e/protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4", size = 392409 }, + { url = "https://files.pythonhosted.org/packages/0c/d4/589d673ada9c4c62d5f155218d7ff7ac796efb9c6af95b0bd29d438ae16e/protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d", size = 413398 }, + { url = "https://files.pythonhosted.org/packages/34/ca/bf85ffe3dd16f1f2aaa6c006da8118800209af3da160ae4d4f47500eabd9/protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b", size = 394160 }, + { url = "https://files.pythonhosted.org/packages/68/1d/e8961af9a8e534d66672318d6b70ea8e3391a6b13e16a29b039e4a99c214/protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835", size = 293700 }, 
+ { url = "https://files.pythonhosted.org/packages/ca/6c/cc7ab2fb3a4a7f07f211d8a7bbb76bba633eb09b148296dbd4281e217f95/protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040", size = 294612 }, + { url = "https://files.pythonhosted.org/packages/b5/95/0ba7f66934a0a798006f06fc3d74816da2b7a2bcfd9b98c53d26f684c89e/protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978", size = 156464 }, +] + +[[package]] +name = "psutil" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/c7/8c6872f7372eb6a6b2e4708b88419fb46b857f7a2e1892966b851cc79fc9/psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", size = 508067 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/66/78c9c3020f573c58101dc43a44f6855d01bbbd747e24da2f0c4491200ea3/psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", size = 249766 }, + { url = "https://files.pythonhosted.org/packages/e1/3f/2403aa9558bea4d3854b0e5e567bc3dd8e9fbc1fc4453c0aa9aafeb75467/psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", size = 253024 }, + { url = "https://files.pythonhosted.org/packages/0b/37/f8da2fbd29690b3557cca414c1949f92162981920699cd62095a984983bf/psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", size = 250961 }, + { url = "https://files.pythonhosted.org/packages/35/56/72f86175e81c656a01c4401cd3b1c923f891b31fbcebe98985894176d7c9/psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", size = 287478 }, + { url = 
"https://files.pythonhosted.org/packages/19/74/f59e7e0d392bc1070e9a70e2f9190d652487ac115bb16e2eff6b22ad1d24/psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", size = 290455 }, + { url = "https://files.pythonhosted.org/packages/cd/5f/60038e277ff0a9cc8f0c9ea3d0c5eb6ee1d2470ea3f9389d776432888e47/psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132", size = 292046 }, + { url = "https://files.pythonhosted.org/packages/8b/20/2ff69ad9c35c3df1858ac4e094f20bd2374d33c8643cf41da8fd7cdcb78b/psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d", size = 253560 }, + { url = "https://files.pythonhosted.org/packages/73/44/561092313ae925f3acfaace6f9ddc4f6a9c748704317bad9c8c8f8a36a79/psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3", size = 257399 }, + { url = "https://files.pythonhosted.org/packages/7c/06/63872a64c312a24fb9b4af123ee7007a306617da63ff13bcc1432386ead7/psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0", size = 251988 }, +] + +[[package]] +name = "psycopg" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/8e/f176997fd790d3dce9fa0ca695391beaeee39af7ecd6d426c4c063cf6744/psycopg-3.2.1.tar.gz", hash = "sha256:dc8da6dc8729dacacda3cc2f17d2c9397a70a66cf0d2b69c91065d60d5f00cb7", size = 155313 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8a/0e/0f755db36f47f96464463385552f8f132a981731356837c9a30a11ab2d35/psycopg-3.2.1-py3-none-any.whl", hash = "sha256:ece385fb413a37db332f97c49208b36cf030ff02b199d7635ed2fbd378724175", size = 197743 }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "(implementation_name != 'pypy' and sys_platform == 'darwin') or (implementation_name != 'pypy' and sys_platform == 'linux') or (implementation_name != 'pypy' and sys_platform == 'win32')" }, +] +pool = [ + { name = "psycopg-pool", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/43/a57494f47d29bd371ab38b745b2e93b9d486067391631c50beda889e7706/psycopg_binary-3.2.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:cad2de17804c4cfee8640ae2b279d616bb9e4734ac3c17c13db5e40982bd710d", size = 3379984 }, + { url = "https://files.pythonhosted.org/packages/bb/60/3b23bab21de16d08a15612ebf8727604b13f0d7457c66b5cf3fed05420eb/psycopg_binary-3.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:592b27d6c46a40f9eeaaeea7c1fef6f3c60b02c634365eb649b2d880669f149f", size = 3501361 }, + { url = "https://files.pythonhosted.org/packages/40/88/f8055b32f72bed87a7989254975aa9d5a692356df6ba0971f10c53b73420/psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a997efbaadb5e1a294fb5760e2f5643d7b8e4e3fe6cb6f09e6d605fd28e0291", size = 4467387 }, + { url = "https://files.pythonhosted.org/packages/aa/6b/b3fd5c22212172cb480775e0c19bb13926581536f131d9fd44def27385cf/psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1d2b6438fb83376f43ebb798bf0ad5e57bc56c03c9c29c85bc15405c8c0ac5a", size = 4269322 }, + { url = 
"https://files.pythonhosted.org/packages/d5/ba/1682c91820235c6aa953772e28aa133488e827cbd17f35bb3e9140c922d4/psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1f087bd84bdcac78bf9f024ebdbfacd07fc0a23ec8191448a50679e2ac4a19e", size = 4513654 }, + { url = "https://files.pythonhosted.org/packages/64/dd/bc81a1e5da6827efbf80c6881e36feb425dcd43efd91182ae50c07d19b1c/psycopg_binary-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:415c3b72ea32119163255c6504085f374e47ae7345f14bc3f0ef1f6e0976a879", size = 4213509 }, + { url = "https://files.pythonhosted.org/packages/75/13/edc342fbb4347affbc4df85300c69e56c5f56648d0ed63ae954448915c83/psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f092114f10f81fb6bae544a0ec027eb720e2d9c74a4fcdaa9dd3899873136935", size = 3135832 }, + { url = "https://files.pythonhosted.org/packages/f5/b6/2079baff967b5f42f5f3d5476cfd70d85f0931e382b9e107e2653850fe0a/psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06a7aae34edfe179ddc04da005e083ff6c6b0020000399a2cbf0a7121a8a22ea", size = 3113278 }, + { url = "https://files.pythonhosted.org/packages/58/a0/c1c31306361142197a736eda60bc7ff4d735e481e8bb63b55d95cc982e3f/psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0b018631e5c80ce9bc210b71ea885932f9cca6db131e4df505653d7e3873a938", size = 3222019 }, + { url = "https://files.pythonhosted.org/packages/c4/39/7af53a485b916232d7423dc58e610a79961af6f4e2c3827ec111cdb3684e/psycopg_binary-3.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f8a509aeaac364fa965454e80cd110fe6d48ba2c80f56c9b8563423f0b5c3cfd", size = 3253667 }, + { url = "https://files.pythonhosted.org/packages/31/65/28feb23d1ab2d9d1215899faeedd2504fca37e0dbae546e9e7e62fee05f6/psycopg_binary-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:413977d18412ff83486eeb5875eb00b185a9391c57febac45b8993bf9c0ff489", size = 2922341 }, + { url = 
"https://files.pythonhosted.org/packages/43/68/f49dd22dc9f9869597d90fff73dcc8c9754304cdfeefa5f463abb4a1fcce/psycopg_binary-3.2.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:62b1b7b07e00ee490afb39c0a47d8282a9c2822c7cfed9553a04b0058adf7e7f", size = 3388952 }, + { url = "https://files.pythonhosted.org/packages/ef/3c/90210e090be228e9876bc210576cfd75e240505f16c92fa8b11839acbf35/psycopg_binary-3.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8afb07114ea9b924a4a0305ceb15354ccf0ef3c0e14d54b8dbeb03e50182dd7", size = 3506474 }, + { url = "https://files.pythonhosted.org/packages/9d/2a/d45ff1f4b8d5b334695f3f5a68c722dbf483b65348f2e2639cf2f45c7b73/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bb515d042f6a345714ec0403df68ccf13f73b05e567837d80c886c7c9d3805", size = 4464849 }, + { url = "https://files.pythonhosted.org/packages/8c/ce/60562887f1363747ce2e074841548f96b433dd50e78d822c88e7ad6ec817/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6418712ba63cebb0c88c050b3997185b0ef54173b36568522d5634ac06153040", size = 4263085 }, + { url = "https://files.pythonhosted.org/packages/2e/4f/af3cb85b967d2616c9c4e2bea9e865c8d0c38fc83ce5db1ef050ceba2bea/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:101472468d59c74bb8565fab603e032803fd533d16be4b2d13da1bab8deb32a3", size = 4514411 }, + { url = "https://files.pythonhosted.org/packages/1d/00/685055d15f70e57d24cffe59021d53d428cdd7126b87442b5b07c9ffd222/psycopg_binary-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3931f308ab4a479d0ee22dc04bea867a6365cac0172e5ddcba359da043854b", size = 4207636 }, + { url = "https://files.pythonhosted.org/packages/72/9f/d6f6c8f60c4ebcc270efda17ab22110b24934f610dc7d5d3e2dc1e9eecbc/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:dc314a47d44fe1a8069b075a64abffad347a3a1d8652fed1bab5d3baea37acb2", size = 3132484 }, + { url = "https://files.pythonhosted.org/packages/8e/e8/742cca374ab3725606f79a9b3b2429bba73917e1d14d52ba39d83dec0a3c/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc304a46be1e291031148d9d95c12451ffe783ff0cc72f18e2cc7ec43cdb8c68", size = 3111128 }, + { url = "https://files.pythonhosted.org/packages/61/a9/046536ef56a785e12c72c2a2507058473889bd7d625fbce142f1a1662bc2/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f9e13600647087df5928875559f0eb8f496f53e6278b7da9511b4b3d0aff960", size = 3213088 }, + { url = "https://files.pythonhosted.org/packages/2d/40/a988739a5d8e72c553a44abba71217c601400e5164a874916e2aa4285139/psycopg_binary-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b140182830c76c74d17eba27df3755a46442ce8d4fb299e7f1cf2f74a87c877b", size = 3252404 }, + { url = "https://files.pythonhosted.org/packages/c7/16/bfefaa5417e05f77c12f1cd099da7a00666fb2c8aef5996014f255a29857/psycopg_binary-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:3c838806eeb99af39f934b7999e35f947a8e577997cc892c12b5053a97a9057f", size = 2925802 }, + { url = "https://files.pythonhosted.org/packages/50/5d/51d39aafab4384a744d5e927b7867f3dadd8537249e8173e34aaf894db94/psycopg_binary-3.2.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7066d3dca196ed0dc6172f9777b2d62e4f138705886be656cccff2d555234d60", size = 3359766 }, + { url = "https://files.pythonhosted.org/packages/e4/7b/75be686af04e2019b53a9ff22de3aa750db7d34f532e4b949ed15a78b627/psycopg_binary-3.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:28ada5f610468c57d8a4a055a8ea915d0085a43d794266c4f3b9d02f4288f4db", size = 3503325 }, + { url = "https://files.pythonhosted.org/packages/3f/9a/28da916a65fb40fb3e1a97e1ae0a26860d8c1265c6e9766bd6c47abc437b/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2e8213bf50af073b1aa8dc3cff123bfeedac86332a16c1b7274910bc88a847c7", size = 4443593 }, + { url = "https://files.pythonhosted.org/packages/b0/9a/3dc1237a2ef3344b347af79e1aad2a60277cfafa2846f54cb13e1cd8c528/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74d623261655a169bc84a9669890975c229f2fa6e19a7f2d10a77675dcf1a707", size = 4247005 }, + { url = "https://files.pythonhosted.org/packages/d0/a9/06491cb0338b6f0868d349d2a526586dc165e508b64daa2ff45f9db7ba4b/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42781ba94e8842ee98bca5a7d0c44cc9d067500fedca2d6a90fa3609b6d16b42", size = 4484179 }, + { url = "https://files.pythonhosted.org/packages/4b/5f/b1116467dd18b4efc1aa7f03c96da751724a43c6a630979c61f60a9fbe5f/psycopg_binary-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6669091d09f8ba36e10ce678a6d9916e110446236a9b92346464a3565635e", size = 4186490 }, + { url = "https://files.pythonhosted.org/packages/a4/87/6092d1701d36c5aeb74c35cb54266fd44ee0f7711cafa4c0bffd873bdb61/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b09e8a576a2ac69d695032ee76f31e03b30781828b5dd6d18c6a009e5a3d1c35", size = 3109385 }, + { url = "https://files.pythonhosted.org/packages/62/61/4ad7e29d09202478b6f568fff19efa978a4f2c25cb5efcd73544a4ee8be7/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8f28ff0cb9f1defdc4a6f8c958bf6787274247e7dfeca811f6e2f56602695fb1", size = 3094397 }, + { url = "https://files.pythonhosted.org/packages/b7/dd/0ae42c64bf524d1fcf9bf861ab09d331e693ae00e527ba08131b2d3729a3/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4c84fcac8a3a3479ac14673095cc4e1fdba2935499f72c436785ac679bec0d1a", size = 3184097 }, + { url = 
"https://files.pythonhosted.org/packages/dd/f0/09329ebb0cd03e2ee5786fc9914ac904f4965b78627f15826f8258fde734/psycopg_binary-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:950fd666ec9e9fe6a8eeb2b5a8f17301790e518953730ad44d715b59ffdbc67f", size = 3228517 }, + { url = "https://files.pythonhosted.org/packages/60/2f/979228189adbeb59afce626f1e7c3bf73cc7ff94217099a2ddfd6fd132ff/psycopg_binary-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:334046a937bb086c36e2c6889fe327f9f29bfc085d678f70fac0b0618949f674", size = 2911959 }, +] + +[[package]] +name = "psycopg-pool" +version = "3.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/57/9353b9ca259eaa3f0da2780eae7136948e70a8423e66b08a1115e7501860/psycopg_pool-3.2.2.tar.gz", hash = "sha256:9e22c370045f6d7f2666a5ad1b0caf345f9f1912195b0b25d0d3bcc4f3a7389c", size = 29665 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/0f/1cbe48737ac568e09fe03fbbcc585cdb535b5efb7709ba9b3f38a7ad7645/psycopg_pool-3.2.2-py3-none-any.whl", hash = "sha256:273081d0fbfaced4f35e69200c89cb8fbddfe277c38cc86c235b90a2ec2c8153", size = 38140 }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = 
{ registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, +] + +[[package]] +name = "pyarrow" +version = "17.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/4e/ea6d43f324169f8aec0e57569443a38bab4b398d09769ca64f7b4d467de3/pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28", size = 1112479 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/5d/78d4b040bc5ff2fc6c3d03e80fca396b742f6c125b8af06bcf7427f931bc/pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07", size = 28994846 }, + { url = "https://files.pythonhosted.org/packages/3b/73/8ed168db7642e91180330e4ea9f3ff8bab404678f00d32d7df0871a4933b/pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655", size = 27165908 }, + { url = "https://files.pythonhosted.org/packages/81/36/e78c24be99242063f6d0590ef68c857ea07bdea470242c361e9a15bd57a4/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545", size = 39264209 }, + { url = 
"https://files.pythonhosted.org/packages/18/4c/3db637d7578f683b0a8fb8999b436bdbedd6e3517bd4f90c70853cf3ad20/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2", size = 39862883 }, + { url = "https://files.pythonhosted.org/packages/81/3c/0580626896c842614a523e66b351181ed5bb14e5dfc263cd68cea2c46d90/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8", size = 38723009 }, + { url = "https://files.pythonhosted.org/packages/ee/fb/c1b47f0ada36d856a352da261a44d7344d8f22e2f7db3945f8c3b81be5dd/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047", size = 39855626 }, + { url = "https://files.pythonhosted.org/packages/19/09/b0a02908180a25d57312ab5919069c39fddf30602568980419f4b02393f6/pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087", size = 25147242 }, + { url = "https://files.pythonhosted.org/packages/f9/46/ce89f87c2936f5bb9d879473b9663ce7a4b1f4359acc2f0eb39865eaa1af/pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977", size = 29028748 }, + { url = "https://files.pythonhosted.org/packages/8d/8e/ce2e9b2146de422f6638333c01903140e9ada244a2a477918a368306c64c/pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3", size = 27190965 }, + { url = "https://files.pythonhosted.org/packages/3b/c8/5675719570eb1acd809481c6d64e2136ffb340bc387f4ca62dce79516cea/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15", size = 39269081 }, + { url = 
"https://files.pythonhosted.org/packages/5e/78/3931194f16ab681ebb87ad252e7b8d2c8b23dad49706cadc865dff4a1dd3/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597", size = 39864921 }, + { url = "https://files.pythonhosted.org/packages/d8/81/69b6606093363f55a2a574c018901c40952d4e902e670656d18213c71ad7/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420", size = 38740798 }, + { url = "https://files.pythonhosted.org/packages/4c/21/9ca93b84b92ef927814cb7ba37f0774a484c849d58f0b692b16af8eebcfb/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4", size = 39871877 }, + { url = "https://files.pythonhosted.org/packages/30/d1/63a7c248432c71c7d3ee803e706590a0b81ce1a8d2b2ae49677774b813bb/pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03", size = 25151089 }, + { url = "https://files.pythonhosted.org/packages/d4/62/ce6ac1275a432b4a27c55fe96c58147f111d8ba1ad800a112d31859fae2f/pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22", size = 29019418 }, + { url = "https://files.pythonhosted.org/packages/8e/0a/dbd0c134e7a0c30bea439675cc120012337202e5fac7163ba839aa3691d2/pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053", size = 27152197 }, + { url = "https://files.pythonhosted.org/packages/cb/05/3f4a16498349db79090767620d6dc23c1ec0c658a668d61d76b87706c65d/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a", size = 39263026 }, + { url = 
"https://files.pythonhosted.org/packages/c2/0c/ea2107236740be8fa0e0d4a293a095c9f43546a2465bb7df34eee9126b09/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc", size = 39880798 }, + { url = "https://files.pythonhosted.org/packages/f6/b0/b9164a8bc495083c10c281cc65064553ec87b7537d6f742a89d5953a2a3e/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a", size = 38715172 }, + { url = "https://files.pythonhosted.org/packages/f1/c4/9625418a1413005e486c006e56675334929fad864347c5ae7c1b2e7fe639/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b", size = 39874508 }, + { url = "https://files.pythonhosted.org/packages/ae/49/baafe2a964f663413be3bd1cf5c45ed98c5e42e804e2328e18f4570027c1/pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7", size = 25099235 }, +] + +[[package]] +name = "pyasn1" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/a3/d2157f333900747f20984553aca98008b6dc843eb62f3a36030140ccec0d/pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c", size = 148088 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/7e/5f50d07d5e70a2addbccd90ac2950f81d1edd0783630651d9268d7f1db49/pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473", size = 85313 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f7/00/e7bd1dec10667e3f2be602686537969a7ac92b0a7c5165be2e5875dc3971/pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6", size = 307859 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/68/8906226b15ef38e71dc926c321d2fe99de8048e9098b5dfd38343011c886/pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b", size = 181220 }, +] + +[[package]] +name = "pybars4" +version = "0.9.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymeta3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ee/52/9aa428633ef5aba4b096b2b2f8d046ece613cecab28b4ceed54126d25ea5/pybars4-0.9.13.tar.gz", hash = "sha256:425817da20d4ad320bc9b8e77a60cab1bb9d3c677df3dce224925c3310fcd635", size = 29907 } + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pycryptodome" +version = "3.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/ed/19223a0a0186b8a91ebbdd2852865839237a21c74f1fbc4b8d5b62965239/pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7", size = 4794232 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ff/96/b0d494defb3346378086848a8ece5ddfd138a66c4a05e038fca873b2518c/pycryptodome-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044", size = 2427142 }, + { url = "https://files.pythonhosted.org/packages/24/80/56a04e2ae622d7f38c1c01aef46a26c6b73a2ad15c9705a8e008b5befb03/pycryptodome-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a", size = 1590045 }, + { url = "https://files.pythonhosted.org/packages/ea/94/82ebfa5c83d980907ceebf79b00909a569d258bdfd9b0264d621fa752cfd/pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2", size = 2061748 }, + { url = "https://files.pythonhosted.org/packages/af/20/5f29ec45462360e7f61e8688af9fe4a0afae057edfabdada662e11bf97e7/pycryptodome-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c", size = 2135687 }, + { url = "https://files.pythonhosted.org/packages/e5/1f/6bc4beb4adc07b847e5d3fddbec4522c2c3aa05df9e61b91dc4eff6a4946/pycryptodome-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25", size = 2164262 }, + { url = "https://files.pythonhosted.org/packages/30/4b/cbc67cda0efd55d7ddcc98374c4b9c853022a595ed1d78dd15c961bc7f6e/pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128", size = 2054347 }, + { url = "https://files.pythonhosted.org/packages/0d/08/01987ab75ca789247a88c8b2f0ce374ef7d319e79589e0842e316a272662/pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c", size 
= 2192762 }, + { url = "https://files.pythonhosted.org/packages/b5/bf/798630923b67f4201059c2d690105998f20a6a8fb9b5ab68d221985155b3/pycryptodome-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4", size = 2155230 }, + { url = "https://files.pythonhosted.org/packages/39/12/5fe7f5b9212dda9f5a26f842a324d6541fe1ca8059602124ff30db1e874b/pycryptodome-3.20.0-cp35-abi3-win32.whl", hash = "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72", size = 1723464 }, + { url = "https://files.pythonhosted.org/packages/1f/90/d131c0eb643290230dfa4108b7c2d135122d88b714ad241d77beb4782a76/pycryptodome-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9", size = 1759588 }, + { url = "https://files.pythonhosted.org/packages/17/87/c7153fcd400df0f4a67d7d92cdb6b5e43f309c22434374b8a61849dfb280/pycryptodome-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a", size = 1639310 }, + { url = "https://files.pythonhosted.org/packages/68/9a/88d984405b087e8c8dd9a9d4c81a6fa675454e5fcf2ae01d9553b3128637/pycryptodome-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e", size = 1708332 }, + { url = "https://files.pythonhosted.org/packages/c7/10/88fb67d2fa545ce2ac61cfda70947bcbb1769f1956315c4b919d79774897/pycryptodome-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04", size = 1565619 }, + { url = "https://files.pythonhosted.org/packages/a2/40/63dff38fa4f7888f812263494d4a745eeed180ff09dd7b8350a81eb09d21/pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3", size = 1606403 }, + { url = 
"https://files.pythonhosted.org/packages/8b/61/522235ca81d9dcfcf8b4cbc253b3a8a1f2231603d486369a8a02eb998f31/pycryptodome-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea", size = 1637284 }, + { url = "https://files.pythonhosted.org/packages/e9/a7/5aa0596f7fc710fd55b4e6bbb025fedacfec929465a618f20e61ebf7df76/pycryptodome-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b", size = 1741193 }, +] + +[[package]] +name = "pydantic" +version = "2.8.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8c/99/d0a5dca411e0a017762258013ba9905cd6e7baa9a3fd1fe8b6529472902e/pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a", size = 739834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/fa/b7f815b8c9ad021c07f88875b601222ef5e70619391ade4a49234d12d278/pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8", size = 423875 }, +] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/12/e3/0d5ad91211dba310f7ded335f4dad871172b9cc9ce204f5a56d76ccd6247/pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4", size = 388371 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/9d/f30f080f745682e762512f3eef1f6e392c7d74a102e6e96de8a013a5db84/pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3", size = 1837257 }, + { url = "https://files.pythonhosted.org/packages/f2/89/77e7aebdd4a235497ac1e07f0a99e9f40e47f6e0f6783fe30500df08fc42/pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6", size = 1776715 }, + { url = "https://files.pythonhosted.org/packages/18/50/5a4e9120b395108c2a0441a425356c0d26a655d7c617288bec1c28b854ac/pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a", size = 1789023 }, + { url = "https://files.pythonhosted.org/packages/c7/e5/f19e13ba86b968d024b56aa53f40b24828652ac026e5addd0ae49eeada02/pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3", size = 1775598 }, + { url = "https://files.pythonhosted.org/packages/c9/c7/f3c29bed28bd022c783baba5bf9946c4f694cb837a687e62f453c81eb5c6/pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1", size = 1977691 }, + { url = "https://files.pythonhosted.org/packages/41/3e/f62c2a05c554fff34570f6788617e9670c83ed7bc07d62a55cccd1bc0be6/pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953", size = 2693214 
}, + { url = "https://files.pythonhosted.org/packages/ae/49/8a6fe79d35e2f3bea566d8ea0e4e6f436d4f749d7838c8e8c4c5148ae706/pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98", size = 2061047 }, + { url = "https://files.pythonhosted.org/packages/51/c6/585355c7c8561e11197dbf6333c57dd32f9f62165d48589b57ced2373d97/pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a", size = 1895106 }, + { url = "https://files.pythonhosted.org/packages/ce/23/829f6b87de0775919e82f8addef8b487ace1c77bb4cb754b217f7b1301b6/pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a", size = 1968506 }, + { url = "https://files.pythonhosted.org/packages/ca/2f/f8ca8f0c40b3ee0a4d8730a51851adb14c5eda986ec09f8d754b2fba784e/pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840", size = 2110217 }, + { url = "https://files.pythonhosted.org/packages/bb/a0/1876656c7b17eb69cc683452cce6bb890dd722222a71b3de57ddb512f561/pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250", size = 1709669 }, + { url = "https://files.pythonhosted.org/packages/be/4a/576524eefa9b301c088c4818dc50ff1c51a88fe29efd87ab75748ae15fd7/pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c", size = 1902386 }, + { url = "https://files.pythonhosted.org/packages/61/db/f6a724db226d990a329910727cfac43539ff6969edc217286dd05cda3ef6/pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312", size = 1834507 }, + { url = 
"https://files.pythonhosted.org/packages/9b/83/6f2bfe75209d557ae1c3550c1252684fc1827b8b12fbed84c3b4439e135d/pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88", size = 1773527 }, + { url = "https://files.pythonhosted.org/packages/93/ef/513ea76d7ca81f2354bb9c8d7839fc1157673e652613f7e1aff17d8ce05d/pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc", size = 1787879 }, + { url = "https://files.pythonhosted.org/packages/31/0a/ac294caecf235f0cc651de6232f1642bb793af448d1cfc541b0dc1fd72b8/pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43", size = 1774694 }, + { url = "https://files.pythonhosted.org/packages/46/a4/08f12b5512f095963550a7cb49ae010e3f8f3f22b45e508c2cb4d7744fce/pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6", size = 1976369 }, + { url = "https://files.pythonhosted.org/packages/15/59/b2495be4410462aedb399071c71884042a2c6443319cbf62d00b4a7ed7a5/pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121", size = 2691250 }, + { url = "https://files.pythonhosted.org/packages/3c/ae/fc99ce1ba791c9e9d1dee04ce80eef1dae5b25b27e3fc8e19f4e3f1348bf/pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1", size = 2061462 }, + { url = "https://files.pythonhosted.org/packages/44/bb/eb07cbe47cfd638603ce3cb8c220f1a054b821e666509e535f27ba07ca5f/pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b", size = 1893923 }, + { url = "https://files.pythonhosted.org/packages/ce/ef/5a52400553b8faa0e7f11fd7a2ba11e8d2feb50b540f9e7973c49b97eac0/pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27", size = 1966779 }, + { url = "https://files.pythonhosted.org/packages/4c/5b/fb37fe341344d9651f5c5f579639cd97d50a457dc53901aa8f7e9f28beb9/pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b", size = 2109044 }, + { url = "https://files.pythonhosted.org/packages/70/1a/6f7278802dbc66716661618807ab0dfa4fc32b09d1235923bbbe8b3a5757/pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a", size = 1708265 }, + { url = "https://files.pythonhosted.org/packages/35/7f/58758c42c61b0bdd585158586fecea295523d49933cb33664ea888162daf/pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2", size = 1901750 }, + { url = "https://files.pythonhosted.org/packages/6f/47/ef0d60ae23c41aced42921728650460dc831a0adf604bfa66b76028cb4d0/pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231", size = 1839225 }, + { url = "https://files.pythonhosted.org/packages/6a/23/430f2878c9cd977a61bb39f71751d9310ec55cee36b3d5bf1752c6341fd0/pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9", size = 1768604 }, + { url = "https://files.pythonhosted.org/packages/9e/2b/ec4e7225dee79e0dc80ccc3c35ab33cc2c4bbb8a1a7ecf060e5e453651ec/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f", size = 1789767 }, + { url = "https://files.pythonhosted.org/packages/64/b0/38b24a1fa6d2f96af3148362e10737ec073768cd44d3ec21dca3be40a519/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52", size = 1772061 }, + { url = "https://files.pythonhosted.org/packages/5e/da/bb73274c42cb60decfa61e9eb0c9029da78b3b9af0a9de0309dbc8ff87b6/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237", size = 1974573 }, + { url = "https://files.pythonhosted.org/packages/c8/65/41693110fb3552556180460daffdb8bbeefb87fc026fd9aa4b849374015c/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe", size = 2625596 }, + { url = "https://files.pythonhosted.org/packages/09/b3/a5a54b47cccd1ab661ed5775235c5e06924753c2d4817737c5667bfa19a8/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e", size = 2099064 }, + { url = "https://files.pythonhosted.org/packages/52/fa/443a7a6ea54beaba45ff3a59f3d3e6e3004b7460bcfb0be77bcf98719d3b/pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24", size = 1900345 }, + { url = "https://files.pythonhosted.org/packages/8e/e6/9aca9ffae60f9cdf0183069de3e271889b628d0fb175913fcb3db5618fb1/pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1", size = 1968252 }, + { url = 
"https://files.pythonhosted.org/packages/46/5e/6c716810ea20a6419188992973a73c2fb4eb99cd382368d0637ddb6d3c99/pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd", size = 2119191 }, + { url = "https://files.pythonhosted.org/packages/06/fc/6123b00a9240fbb9ae0babad7a005d51103d9a5d39c957a986f5cdd0c271/pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688", size = 1717788 }, + { url = "https://files.pythonhosted.org/packages/d5/36/e61ad5a46607a469e2786f398cd671ebafcd9fb17f09a2359985c7228df5/pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d", size = 1898188 }, + { url = "https://files.pythonhosted.org/packages/49/75/40b0e98b658fdba02a693b3bacb4c875a28bba87796c7b13975976597d8c/pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686", size = 1838688 }, + { url = "https://files.pythonhosted.org/packages/75/02/d8ba2d4a266591a6a623c68b331b96523d4b62ab82a951794e3ed8907390/pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a", size = 1768409 }, + { url = "https://files.pythonhosted.org/packages/91/ae/25ecd9bc4ce4993e99a1a3c9ab111c082630c914260e129572fafed4ecc2/pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b", size = 1789317 }, + { url = "https://files.pythonhosted.org/packages/7a/80/72057580681cdbe55699c367963d9c661b569a1d39338b4f6239faf36cdc/pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19", size = 1771949 }, + { url = 
"https://files.pythonhosted.org/packages/a2/be/d9bbabc55b05019013180f141fcaf3b14dbe15ca7da550e95b60c321009a/pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac", size = 1974392 }, + { url = "https://files.pythonhosted.org/packages/79/2d/7bcd938c6afb0f40293283f5f09988b61fb0a4f1d180abe7c23a2f665f8e/pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703", size = 2625565 }, + { url = "https://files.pythonhosted.org/packages/ac/88/ca758e979457096008a4b16a064509028e3e092a1e85a5ed6c18ced8da88/pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c", size = 2098784 }, + { url = "https://files.pythonhosted.org/packages/eb/de/2fad6d63c3c42e472e985acb12ec45b7f56e42e6f4cd6dfbc5e87ee8678c/pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83", size = 1900198 }, + { url = "https://files.pythonhosted.org/packages/fe/50/077c7f35b6488dc369a6d22993af3a37901e198630f38ac43391ca730f5b/pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203", size = 1968005 }, + { url = "https://files.pythonhosted.org/packages/5d/1f/f378631574ead46d636b9a04a80ff878b9365d4b361b1905ef1667d4182a/pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0", size = 2118920 }, + { url = "https://files.pythonhosted.org/packages/7a/ea/e4943f17df7a3031d709481fe4363d4624ae875a6409aec34c28c9e6cf59/pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e", size = 1717397 
}, + { url = "https://files.pythonhosted.org/packages/13/63/b95781763e8d84207025071c0cec16d921c0163c7a9033ae4b9a0e020dc7/pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20", size = 1898013 }, + { url = "https://files.pythonhosted.org/packages/73/73/0c7265903f66cce39ed7ca939684fba344210cefc91ccc999cfd5b113fd3/pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906", size = 1828190 }, + { url = "https://files.pythonhosted.org/packages/27/55/60b8b0e58b49ee3ed36a18562dd7c6bc06a551c390e387af5872a238f2ec/pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94", size = 1715252 }, + { url = "https://files.pythonhosted.org/packages/28/3d/d66314bad6bb777a36559195a007b31e916bd9e2c198f7bb8f4ccdceb4fa/pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f", size = 1782641 }, + { url = "https://files.pythonhosted.org/packages/9e/f5/f178f4354d0d6c1431a8f9ede71f3c4269ac4dc55d314fdb7555814276dc/pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482", size = 1928788 }, + { url = "https://files.pythonhosted.org/packages/9c/51/1f5e27bb194df79e30b593b608c66e881ed481241e2b9ed5bdf86d165480/pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6", size = 1886116 }, + { url = "https://files.pythonhosted.org/packages/ac/76/450d9258c58dc7c70b9e3aadf6bebe23ddd99e459c365e2adbde80e238da/pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc", size = 1960125 }, + { url = "https://files.pythonhosted.org/packages/dd/9e/0309a7a4bea51771729515e413b3987be0789837de99087f7415e0db1f9b/pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99", size = 2100407 }, + { url = "https://files.pythonhosted.org/packages/af/93/06d44e08277b3b818b75bd5f25e879d7693e4b7dd3505fde89916fcc9ca2/pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6", size = 1914966 }, +] + +[[package]] +name = "pydantic-settings" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/14/7bfb313ccee79f97dc235721b035174af94ef4472cfe455c259cd2971f2f/pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88", size = 63033 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/46/7f90f36c1bdcf24962d2b7b0e11aba3bbd65ea7904cb2553072882a4e6b7/pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315", size = 23996 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pyjwt" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/68/ce067f09fca4abeca8771fe667d89cc347d1e99da3e093112ac329c6020e/pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c", size = 78825 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/84/0fdf9b18ba31d69877bd39c9cd6052b47f3761e9910c15de788e519f079f/PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850", size = 22344 }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "pymeta3" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/af/409edba35fc597f1e386e3860303791ab5a28d6cc9a8aecbc567051b19a9/PyMeta3-0.5.1.tar.gz", hash = "sha256:18bda326d9a9bbf587bfc0ee0bc96864964d78b067288bcf55d4d98681d05bcb", size = 29566 } + +[[package]] +name = "pymilvus" +version = "2.3.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-storage-blob", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "environs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "minio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pandas", marker = 
"sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyarrow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ujson", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/54/d01f6609245ea6fbf16f7bb6d7cf28b083342abbf05dad414077279c7004/pymilvus-2.3.8.tar.gz", hash = "sha256:686e30939540114b1b7d42a8b3ab3dfcd0fa323b506e69e624c203c491db2a58", size = 1183645 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/7dd39e2199933a529523ae77788ce3490cb8519243d07f7e700488536558/pymilvus-2.3.8-py3-none-any.whl", hash = "sha256:1301bbb0252a2e7aa970be14b6c0e694242faed0f8e3c7d43ed94f61f313a536", size = 179765 }, +] + +[[package]] +name = "pymongo" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/2c/ad0896cb94668c3cad1eb702ab60ae17036b051f54cfe547f11a0322f1d3/pymongo-4.8.0.tar.gz", hash = "sha256:454f2295875744dc70f1881e4b2eb99cdad008a33574bc8aaf120530f66c0cde", size = 1506091 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/eb/3d1afb6800886174bea7f6d01112fd3e2d29d97aac884dc60524fb0d7f4f/pymongo-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2b7bec27e047e84947fbd41c782f07c54c30c76d14f3b8bf0c89f7413fac67a", size = 592364 }, + { url = 
"https://files.pythonhosted.org/packages/b1/d0/1c6b455817200d4621847db16fc081d8c7b9dc2b372c47874112e2e4500e/pymongo-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c68fe128a171493018ca5c8020fc08675be130d012b7ab3efe9e22698c612a1", size = 592510 }, + { url = "https://files.pythonhosted.org/packages/f2/11/17e7585041125c86c55d5a85b4dcf9949e170480502aaa21eced7fc038e5/pymongo-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920d4f8f157a71b3cb3f39bc09ce070693d6e9648fb0e30d00e2657d1dca4e49", size = 1160190 }, + { url = "https://files.pythonhosted.org/packages/d9/1b/210ae77937ecccaa72fcd3c8bf4b6a6dfbe12e973c44adab8991852687d7/pymongo-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b4108ac9469febba18cea50db972605cc43978bedaa9fea413378877560ef8", size = 1199235 }, + { url = "https://files.pythonhosted.org/packages/ea/a1/71a2e738379d3c719a92929a63048504270be73e60339d366f0cc2daf037/pymongo-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:180d5eb1dc28b62853e2f88017775c4500b07548ed28c0bd9c005c3d7bc52526", size = 1178476 }, + { url = "https://files.pythonhosted.org/packages/62/bd/b5e91ac167b57f3559e405389dad760980cf88b90824d7e9f758eacdd01c/pymongo-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aec2b9088cdbceb87e6ca9c639d0ff9b9d083594dda5ca5d3c4f6774f4c81b33", size = 1158294 }, + { url = "https://files.pythonhosted.org/packages/75/bd/9e67b191656a245612a43fc113dca0b7fbdf4a5da07815e795bcee8f475b/pymongo-4.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0cf61450feadca81deb1a1489cb1a3ae1e4266efd51adafecec0e503a8dcd84", size = 1128379 }, + { url = "https://files.pythonhosted.org/packages/63/e4/57e1e2ea95d4b3e4274f38713d591467267d20b0e08b97259287f2acd517/pymongo-4.8.0-cp310-cp310-win32.whl", hash = "sha256:8b18c8324809539c79bd6544d00e0607e98ff833ca21953df001510ca25915d1", size = 
567090 }, + { url = "https://files.pythonhosted.org/packages/08/6c/fe22909894c2ba196661379ac3fc21db697904c1602ee14d5b2a15212e93/pymongo-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e5df28f74002e37bcbdfdc5109799f670e4dfef0fb527c391ff84f078050e7b5", size = 582038 }, + { url = "https://files.pythonhosted.org/packages/0a/3d/bba2845c76dddcd8c34d5014da80346851df048eefa826acb13265affba2/pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68", size = 645578 }, + { url = "https://files.pythonhosted.org/packages/c2/ca/d177c3ad846bad631b548b27c261821d25a08d608dca134aedb1b00b98fe/pymongo-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:417369ce39af2b7c2a9c7152c1ed2393edfd1cbaf2a356ba31eb8bcbd5c98dd7", size = 645731 }, + { url = "https://files.pythonhosted.org/packages/be/1a/3d9b9fb3f9de9da46919fef900fe88090f5865a09ae9e0e19496a603a819/pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d", size = 1399930 }, + { url = "https://files.pythonhosted.org/packages/57/64/281c9c8efb98ab6c6fcf44bf7cc33e17bcb163cb9c9260c9d78d2318d013/pymongo-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9365166aa801c63dff1a3cb96e650be270da06e3464ab106727223123405510f", size = 1451584 }, + { url = "https://files.pythonhosted.org/packages/37/ed/5258d22a91ea6e0b9d72e0aa7674f5a9951fea0c036d1063f29bc45a35d2/pymongo-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc8b8582f4209c2459b04b049ac03c72c618e011d3caa5391ff86d1bda0cc486", size = 1423899 }, + { url = "https://files.pythonhosted.org/packages/f3/7f/6d231046d9caf43395f9406dbef885f122edbee172ec6a3a6ea330e07848/pymongo-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e5019f75f6827bb5354b6fef8dfc9d6c7446894a27346e03134d290eb9e758", size = 1397112 }, + { url = 
"https://files.pythonhosted.org/packages/af/81/4074148396415ac19074a1a144e1cd6b2ff000f5ef253ed24a4e3e9ff340/pymongo-4.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b5802151fc2b51cd45492c80ed22b441d20090fb76d1fd53cd7760b340ff554", size = 1357689 }, + { url = "https://files.pythonhosted.org/packages/bc/26/799fe943573b2d86970698a0667d8d8636790e86242d979f4b3d870d269f/pymongo-4.8.0-cp311-cp311-win32.whl", hash = "sha256:4bf58e6825b93da63e499d1a58de7de563c31e575908d4e24876234ccb910eba", size = 611133 }, + { url = "https://files.pythonhosted.org/packages/51/28/577224211f43e2079126bfec53080efba46e59218f47808098f125139558/pymongo-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b747c0e257b9d3e6495a018309b9e0c93b7f0d65271d1d62e572747f4ffafc88", size = 630990 }, + { url = "https://files.pythonhosted.org/packages/9e/8d/b082d026f96215a76553032620549f931679da7f941018e2c358fd549faa/pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526", size = 699090 }, + { url = "https://files.pythonhosted.org/packages/eb/da/fa51bb7d8d5c8b4672b72c05a9357b5f9300f48128574c746fa4825f607a/pymongo-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31e4d21201bdf15064cf47ce7b74722d3e1aea2597c6785882244a3bb58c7eab", size = 698800 }, + { url = "https://files.pythonhosted.org/packages/7b/dc/78f0c931d38bece6ae1dc49035961c82f3eb42952c745391ebdd3a910222/pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf", size = 1655527 }, + { url = "https://files.pythonhosted.org/packages/74/36/92f0eeeb5111c332072e37efb1d5a668c5e4b75be53cbd06a77f6b4192d2/pymongo-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2fbdb87fe5075c8beb17a5c16348a1ea3c8b282a5cb72d173330be2fecf22f5", size = 1718203 }, + { url = 
"https://files.pythonhosted.org/packages/98/40/757579f837dadaddf167cd36ae85a7ab29c035bc0ae8d90bdc8a5fbdfc33/pymongo-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd39455b7ee70aabee46f7399b32ab38b86b236c069ae559e22be6b46b2bbfc4", size = 1685776 }, + { url = "https://files.pythonhosted.org/packages/24/bb/13d23966ad01511610a471eae480bcb6a94b832c40f2bdbc706f7a757b76/pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8", size = 1650569 }, + { url = "https://files.pythonhosted.org/packages/b5/80/1f405ce80cb6a3867709147e24a2f69e342ff71fb1b9ba663d0237f0c5ed/pymongo-4.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:236bbd7d0aef62e64caf4b24ca200f8c8670d1a6f5ea828c39eccdae423bc2b2", size = 1601592 }, + { url = "https://files.pythonhosted.org/packages/30/19/cd66230b6407c6b8cf45c1ae073659a88af5699c792c46fd4eaf317bd11e/pymongo-4.8.0-cp312-cp312-win32.whl", hash = "sha256:47ec8c3f0a7b2212dbc9be08d3bf17bc89abd211901093e3ef3f2adea7de7a69", size = 656042 }, + { url = "https://files.pythonhosted.org/packages/99/1c/f5108dc39450077556844abfd92b768c57775f85270fc0b1dc834ad18113/pymongo-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8", size = 680400 }, +] + +[[package]] +name = "pyparsing" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/3a/31fd28064d016a2182584d579e033ec95b809d8e220e74c4af6f0f2e8842/pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad", size = 889571 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/ea/6d76df31432a0e6fdf81681a895f009a4bb47b3c39036db3e1b528191d52/pyparsing-3.1.2-py3-none-any.whl", hash = 
"sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742", size = 103245 }, +] + +[[package]] +name = "pypika" +version = "0.48.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259 } + +[[package]] +name = "pyproject-hooks" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/07/6f63dda440d4abb191b91dc383b472dae3dd9f37e4c1e4a5c3db150531c6/pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965", size = 7838 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2", size = 9184 }, +] + +[[package]] +name = "pyreadline3" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/86/3d61a61f36a0067874a00cb4dceb9028d34b6060e47828f7fc86fb9f7ee9/pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae", size = 86465 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/fc/a3c13ded7b3057680c8ae95a9b6cc83e63657c38e0005c400a5d018a33a7/pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb", size = 95203 }, +] + +[[package]] +name = "pytest" +version = "8.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or 
(python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, + { name = "iniconfig", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pluggy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b4/8c/9862305bdcd6020bc7b45b1b5e7397a6caf1a33d3025b9a003b39075ffb2/pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce", size = 1439314 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5", size = 341802 }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/b4/0b378b7bf26a8ae161c3890c0b48a91a04106c5713ce81b4b080ea2f4f18/pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3", size = 46920 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2", size = 17663 }, +] + +[[package]] +name = 
"pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, +] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/c4/3c310a19bc1f1e9ef50075582652673ef2bfc8cd62afef9585683821902f/pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d", size = 84060 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/82/1d96bf03ee4c0fdc3c0cbe61470070e659ca78dc0086fb88b66c185e2449/pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7", size = 46108 }, +] + +[package.optional-dependencies] +psutil = [ + { name = "psutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "pytz" +version = "2024.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/26/9f1f00a5d021fff16dee3de13d43e5e978f3d58928e129c3a62cf7eb9738/pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812", size = 316214 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/3d/a121f284241f08268b21359bd425f7d4825cffc5ac5cd0e1b3d82ffd2b10/pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319", size = 505474 }, +] + +[[package]] +name = "pywin32" +version = "306" 
+source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/dc/28c668097edfaf4eac4617ef7adf081b9cf50d254672fcf399a70f5efc41/pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d", size = 8506422 }, + { url = "https://files.pythonhosted.org/packages/d3/d6/891894edec688e72c2e308b3243fad98b4066e1839fd2fe78f04129a9d31/pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8", size = 9226392 }, + { url = "https://files.pythonhosted.org/packages/8b/1e/fc18ad83ca553e01b97aa8393ff10e33c1fb57801db05488b83282ee9913/pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407", size = 8507689 }, + { url = "https://files.pythonhosted.org/packages/7e/9e/ad6b1ae2a5ad1066dc509350e0fbf74d8d50251a51e420a2a8feaa0cecbd/pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e", size = 9227547 }, + { url = "https://files.pythonhosted.org/packages/91/20/f744bff1da8f43388498503634378dbbefbe493e65675f2cc52f7185c2c2/pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a", size = 10388324 }, + { url = "https://files.pythonhosted.org/packages/14/91/17e016d5923e178346aabda3dfec6629d1a26efe587d19667542105cf0a6/pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b", size = 8507705 }, + { url = "https://files.pythonhosted.org/packages/83/1c/25b79fc3ec99b19b0a0730cc47356f7e2959863bf9f3cd314332bddb4f68/pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e", size = 9227429 }, + { url = 
"https://files.pythonhosted.org/packages/1c/43/e3444dc9a12f8365d9603c2145d16bf0a2f8180f343cf87be47f5579e547/pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040", size = 10388145 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", 
size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = 
"https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = 
"https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = 
"https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, +] + +[[package]] +name = "pyzmq" +version = "26.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "(implementation_name == 'pypy' and sys_platform == 'darwin') or (implementation_name == 'pypy' and sys_platform == 'linux') or (implementation_name == 'pypy' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/c7/01a2dd24d3f54012a85af44474cc2eb5bb40c991d5c25e0572e4cb5135a7/pyzmq-26.1.1.tar.gz", hash = "sha256:a7db05d8b7cd1a8c6610e9e9aa55d525baae7a44a43e18bc3260eb3f92de96c6", size = 271185 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/01/ea9975053adff30c34d5a42378bee171faa4a4fae0f35d1211e8f9ca6e52/pyzmq-26.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:b1bb952d1e407463c9333ea7e0c0600001e54e08ce836d4f0aff1fb3f902cf63", size = 1340141 }, + { url = "https://files.pythonhosted.org/packages/15/76/b29ef0f21b0030b42e34db728df38be7b99165899b1f587ba6fba5c2f749/pyzmq-26.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:65e2a18e845c6ea7ab849c70db932eaeadee5edede9e379eb21c0a44cf523b2e", size = 1008893 }, + { url = 
"https://files.pythonhosted.org/packages/72/6d/efe916dfe41133ef7bf2edcea4d170b7818324fd106cec0574bd121abb46/pyzmq-26.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:def7ae3006924b8a0c146a89ab4008310913fa903beedb95e25dea749642528e", size = 673252 }, + { url = "https://files.pythonhosted.org/packages/50/a4/96f83a39be4831c30cc8322bca50b9e8d3db7701504f526fd409e271c4f2/pyzmq-26.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8234571df7816f99dde89c3403cb396d70c6554120b795853a8ea56fcc26cd3", size = 911824 }, + { url = "https://files.pythonhosted.org/packages/cf/2e/ba7e04cfdc04e1c0be9d1581dd04cf06f53986b76cfd8ac9572f27c136bc/pyzmq-26.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18da8e84dbc30688fd2baefd41df7190607511f916be34f9a24b0e007551822e", size = 868831 }, + { url = "https://files.pythonhosted.org/packages/75/ab/09001241a7e0e81d315ad3409c48ad9e450c7699e6d3bbe70cda5fa58075/pyzmq-26.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c70dab93d98b2bf3f0ac1265edbf6e7f83acbf71dabcc4611889bb0dea45bed7", size = 868895 }, + { url = "https://files.pythonhosted.org/packages/ca/32/a2298fff3d563450fd96175731a45949a111939b35dcd5e963bf70e99de4/pyzmq-26.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fcb90592c5d5c562e1b1a1ceccf6f00036d73c51db0271bf4d352b8d6b31d468", size = 1202921 }, + { url = "https://files.pythonhosted.org/packages/96/fd/25ab3e25171dc338e66334fcc83ecac26bbf935883294a2dc548fd996e0f/pyzmq-26.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cf4be7460a0c1bc71e9b0e64ecdd75a86386ca6afaa36641686f5542d0314e9d", size = 1515366 }, + { url = "https://files.pythonhosted.org/packages/a3/2a/763b45bf6526afc17911b6dd09704034629868394e19b8efaf9014ae51bf/pyzmq-26.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4cbecda4ddbfc1e309c3be04d333f9be3fc6178b8b6592b309676f929767a15", size = 1414773 }, + { url = 
"https://files.pythonhosted.org/packages/36/9e/592c6f746f256f35a81cc4cfc3ecd83ed7edcc1ac85b5289d24f57c9a996/pyzmq-26.1.1-cp310-cp310-win32.whl", hash = "sha256:583f73b113b8165713b6ce028d221402b1b69483055b5aa3f991937e34dd1ead", size = 586145 }, + { url = "https://files.pythonhosted.org/packages/e7/3b/f6192bbf59a87365038106201202a3afd91012241f71719d41e83bd4a6d5/pyzmq-26.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5e6f39ecb8eb7bfcb976c49262e8cf83ff76e082b77ca23ba90c9b6691a345be", size = 650181 }, + { url = "https://files.pythonhosted.org/packages/c6/b0/77d3eb346510ffea093d9fb9d1137007f1097e39a22b915af9ff0b639557/pyzmq-26.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:8d042d6446cab3a1388b38596f5acabb9926b0b95c3894c519356b577a549458", size = 552290 }, + { url = "https://files.pythonhosted.org/packages/05/e4/2226ca5357c404086a332f86f9a80dfdfc911d3aef586484c69fece5db21/pyzmq-26.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:362cac2423e36966d336d79d3ec3eafeabc153ee3e7a5cf580d7e74a34b3d912", size = 1340720 }, + { url = "https://files.pythonhosted.org/packages/f3/13/eef5c8f8169e818aef5979bdaee0b304043e98b5212ae42c0a6c77de2564/pyzmq-26.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0841633446cb1539a832a19bb24c03a20c00887d0cedd1d891b495b07e5c5cb5", size = 1008784 }, + { url = "https://files.pythonhosted.org/packages/95/08/710f6ecd9a987993c36d2a6a52526536fd59616577affaa595a4c74a756b/pyzmq-26.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e1fcdc333afbf9918d0a614a6e10858aede7da49a60f6705a77e343fe86a317", size = 673182 }, + { url = "https://files.pythonhosted.org/packages/72/e6/821458f808f009451299f592d29dcb1a98cd0826a55c789503a7cfb399fb/pyzmq-26.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc8d655627d775475eafdcf0e49e74bcc1e5e90afd9ab813b4da98f092ed7b93", size = 910164 }, + { url = 
"https://files.pythonhosted.org/packages/29/74/a18cf4bed0569f206b461fcf24ca4a106edd6f4736574e27ed14d7cf8dda/pyzmq-26.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32de51744820857a6f7c3077e620ab3f607d0e4388dfead885d5124ab9bcdc5e", size = 868018 }, + { url = "https://files.pythonhosted.org/packages/62/77/a01bfe7e4d49d339cf7fbee5b644c1370a4a2b755dcf643e2d7e7944a50c/pyzmq-26.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a880240597010914ffb1d6edd04d3deb7ce6a2abf79a0012751438d13630a671", size = 869265 }, + { url = "https://files.pythonhosted.org/packages/50/2f/e0b315471e0838ef227d9693b81ea7bca471564230aaa2dd73e3ba92f260/pyzmq-26.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:26131b1cec02f941ed2d2b4b8cc051662b1c248b044eff5069df1f500bbced56", size = 1203406 }, + { url = "https://files.pythonhosted.org/packages/55/cd/a9ea641afb68fe32c632b610da830766f65537dae79b4db1ea5abb788ab3/pyzmq-26.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ce05841322b58510607f9508a573138d995a46c7928887bc433de9cb760fd2ad", size = 1514267 }, + { url = "https://files.pythonhosted.org/packages/d8/d3/f86bf419202d03df579a67079ff8f9ccb4190ed467ad41f8fd091ac2e613/pyzmq-26.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32123ff0a6db521aadf2b95201e967a4e0d11fb89f73663a99d2f54881c07214", size = 1414397 }, + { url = "https://files.pythonhosted.org/packages/7a/83/061ed3bf2649fccaf6ab2b06dcb46077a09dfbc93f9b32dc675d2fd12d12/pyzmq-26.1.1-cp311-cp311-win32.whl", hash = "sha256:e790602d7ea1d6c7d8713d571226d67de7ffe47b1e22ae2c043ebd537de1bccb", size = 585281 }, + { url = "https://files.pythonhosted.org/packages/33/b2/6c355e8ca7f2ff920a5ba221732722304aaebad919109754753e678404a3/pyzmq-26.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:717960855f2d6fdc2dba9df49dff31c414187bb11c76af36343a57d1f7083d9a", size = 651006 }, + { url = 
"https://files.pythonhosted.org/packages/8f/7a/0187ae651393fc82fdd841581929b17509252f68b799bb787de4e48e7181/pyzmq-26.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:08956c26dbcd4fd8835cb777a16e21958ed2412317630e19f0018d49dbeeb470", size = 552828 }, + { url = "https://files.pythonhosted.org/packages/9b/b6/210ff26d3dae4ba8d0b9c0dca3299d8d7273b54f5a74a16ecd1f02c4cdd5/pyzmq-26.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e80345900ae241c2c51bead7c9fa247bba6d4b2a83423e9791bae8b0a7f12c52", size = 1343185 }, + { url = "https://files.pythonhosted.org/packages/05/23/5c74b72effed61c4087a3b549c22e4023e7ddac239ab50687733ec0ed9a6/pyzmq-26.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ec8fe214fcc45dfb0c32e4a7ad1db20244ba2d2fecbf0cbf9d5242d81ca0a375", size = 1008446 }, + { url = "https://files.pythonhosted.org/packages/c0/be/80ee4eb79b3ba87398cca66c4446d660b5e301a9d938a88d7894181fe98a/pyzmq-26.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4e283f97688d993cb7a8acbc22889effbbb7cbaa19ee9709751f44be928f5d", size = 665972 }, + { url = "https://files.pythonhosted.org/packages/19/c0/41b74b114d9ae13db6a5f414feaddf1b39b40603bc0db59f6572115cf92c/pyzmq-26.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2508bdc8ab246e5ed7c92023d4352aaad63020ca3b098a4e3f1822db202f703d", size = 903458 }, + { url = "https://files.pythonhosted.org/packages/f1/55/e5ba8f4baa7695c12a0b69baaecc3c3efac17c3a4d268a9b3400bdfa1e25/pyzmq-26.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:741bdb4d96efe8192616abdc3671931d51a8bcd38c71da2d53fb3127149265d1", size = 860090 }, + { url = "https://files.pythonhosted.org/packages/16/5c/e5043f955844c384e7daef810618893b63b57039f3116b71b9ff9f2609db/pyzmq-26.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:76154943e4c4054b2591792eb3484ef1dd23d59805759f9cebd2f010aa30ee8c", size = 860638 }, + { url = 
"https://files.pythonhosted.org/packages/75/9b/3c6e620db4f300057937f26b1b0f1233f4a043393aa1ae1fceefee1ba174/pyzmq-26.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9498ac427d20d0e0ef0e4bbd6200841e91640dfdf619f544ceec7f464cfb6070", size = 1196306 }, + { url = "https://files.pythonhosted.org/packages/95/06/af96f2ebe638872af78e25f13fdfe43df1d6e8dc668f2a978ef4369318c6/pyzmq-26.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f34453ef3496ca3462f30435bf85f535f9550392987341f9ccc92c102825a79", size = 1507502 }, + { url = "https://files.pythonhosted.org/packages/9f/f0/91f53f61d0e69b6c551ebe48fccc13a0f04cceaa064e1394b5d58048838b/pyzmq-26.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:50f0669324e27cc2091ef6ab76ca7112f364b6249691790b4cffce31e73fda28", size = 1406558 }, + { url = "https://files.pythonhosted.org/packages/1a/75/995c5ebb4bf06447d477a90ac090e78a39eb482567462bfa88e6351fc4ba/pyzmq-26.1.1-cp312-cp312-win32.whl", hash = "sha256:3ee5cbf2625b94de21c68d0cefd35327c8dfdbd6a98fcc41682b4e8bb00d841f", size = 584275 }, + { url = "https://files.pythonhosted.org/packages/dc/08/3e37b0c3c5e4a554e3aface4d6cf272a1b0156e376c5e667725c767ad4be/pyzmq-26.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:75bd448a28b1001b6928679015bc95dd5f172703ed30135bb9e34fc9cda0a3e7", size = 646907 }, + { url = "https://files.pythonhosted.org/packages/76/54/08e0ab926a2228a3285eec873574ab100c25a86c84844bda933048d97b80/pyzmq-26.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:4350233569b4bbef88595c5e77ee38995a6f1f1790fae148b578941bfffd1c24", size = 548851 }, + { url = "https://files.pythonhosted.org/packages/0e/01/92221845d28c7e0f7432cfaa2babbcf4bda5df1803402e063d17a8fbdc15/pyzmq-26.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8087a3281c20b1d11042d372ed5a47734af05975d78e4d1d6e7bd1018535f3", size = 1006634 }, + { url = 
"https://files.pythonhosted.org/packages/7b/cc/acce3be8787fb316d52402f58340c2bf288d24b3242dff4c9c4c0c597f99/pyzmq-26.1.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:ebef7d3fe11fe4c688f08bc0211a976c3318c097057f258428200737b9fff4da", size = 1340519 }, + { url = "https://files.pythonhosted.org/packages/65/aa/49a4f33dc23982eb3edd197e099f1ca67be251afb0e23388adb0f6253aab/pyzmq-26.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a5342110510045a47de1e87f5f1dcc1d9d90109522316dc9830cfc6157c800f", size = 665538 }, + { url = "https://files.pythonhosted.org/packages/ca/f8/2181c0f52344da3ffcc0a7888c21be77775480ce21a720715284c51f398b/pyzmq-26.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af690ea4be6ca92a67c2b44a779a023bf0838e92d48497a2268175dc4a505691", size = 903542 }, + { url = "https://files.pythonhosted.org/packages/31/ef/7497fbb7738db2dc93d6a04e42ddd240567d5ff7270f52b934b58536805e/pyzmq-26.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc994e220c1403ae087d7f0fa45129d583e46668a019e389060da811a5a9320e", size = 860042 }, + { url = "https://files.pythonhosted.org/packages/ae/b2/ce67ad15dac58d4d2e8747dee6211bf761f620394cd51a59d40fa8ff2727/pyzmq-26.1.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:b8e153f5dffb0310af71fc6fc9cd8174f4c8ea312c415adcb815d786fee78179", size = 860391 }, + { url = "https://files.pythonhosted.org/packages/53/49/65a008ba7b9101d163abbcce43e914d620c6d47763d4abeab522fc1bd501/pyzmq-26.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0065026e624052a51033857e5cd45a94b52946b44533f965f0bdf182460e965d", size = 1196235 }, + { url = "https://files.pythonhosted.org/packages/49/ae/43ca5a12eaae55ffe76ef5c0a21bb5ea2e9f29bb2810a2fe747e2d173372/pyzmq-26.1.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:63351392f948b5d50b9f55161994bc4feedbfb3f3cfe393d2f503dea2c3ec445", size = 1507727 }, + { url = 
"https://files.pythonhosted.org/packages/5e/19/7f1d1c4777742c5abadbccfac64b170b63f003ef391d7f87de7a0ac88cbd/pyzmq-26.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ffecc43b3c18e36b62fcec995761829b6ac325d8dd74a4f2c5c1653afbb4495a", size = 1406599 }, + { url = "https://files.pythonhosted.org/packages/f9/cd/5feb4af7cb3839ba6a62c284398e5777e2fb61e52236d95931093d759a4d/pyzmq-26.1.1-cp313-cp313-win32.whl", hash = "sha256:6ff14c2fae6c0c2c1c02590c5c5d75aa1db35b859971b3ca2fcd28f983d9f2b6", size = 584230 }, + { url = "https://files.pythonhosted.org/packages/ec/bc/d34e344b4e4c2c10f76da0c1a5b1f8bcef48c86e1972bfbe9f7d6ef1eaf5/pyzmq-26.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:85f2d2ee5ea9a8f1de86a300e1062fbab044f45b5ce34d20580c0198a8196db0", size = 646848 }, + { url = "https://files.pythonhosted.org/packages/46/26/9bed841b00d372083730bcb8eeb86f2ee0beff456ff07ff3eb0e92aa087a/pyzmq-26.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:cc09b1de8b985ca5a0ca343dd7fb007267c6b329347a74e200f4654268084239", size = 548564 }, + { url = "https://files.pythonhosted.org/packages/1d/36/08357e1e4df430313292b908fc7338f818ac42d3860b6d38a307fd39a205/pyzmq-26.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:bc904e86de98f8fc5bd41597da5d61232d2d6d60c4397f26efffabb961b2b245", size = 1007452 }, + { url = "https://files.pythonhosted.org/packages/99/f9/69a8d2010fa8dbb719b78f7c1c68d1e8d414c9a9e51a22c872624dda5231/pyzmq-26.1.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:00f39c367bbd6aa8e4bc36af6510561944c619b58eb36199fa334b594a18f615", size = 1329613 }, + { url = "https://files.pythonhosted.org/packages/0f/02/a9477dd620115ca3f5f2e90bdd2ab84236808ee510d20136bb8103204193/pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de6f384864a959866b782e6a3896538d1424d183f2d3c7ef079f71dcecde7284", size = 653296 }, + { url = 
"https://files.pythonhosted.org/packages/d3/5d/f4e179aba55479648851b133b01e4546d3d06aa9a508f09cc7f3846c70fe/pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3abb15df0c763339edb27a644c19381b2425ddd1aea3dbd77c1601a3b31867b8", size = 888472 }, + { url = "https://files.pythonhosted.org/packages/2b/ee/616c52d252267cf239e0061e91e67d75732e689ff53a9391637994e96d5b/pyzmq-26.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40908ec2dd3b29bbadc0916a0d3c87f8dbeebbd8fead8e618539f09e0506dec4", size = 845918 }, + { url = "https://files.pythonhosted.org/packages/78/b7/e09f159fe998cc6115fdc91665955e3ce2ac69c40b31aca25bf645b400a6/pyzmq-26.1.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c11a95d3f6fc7e714ccd1066f68f9c1abd764a8b3596158be92f46dd49f41e03", size = 847437 }, + { url = "https://files.pythonhosted.org/packages/c9/13/7b3e09e88e847cc05122284d8d0bb44d3293b54a899b2703b1d65b043695/pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:4437af9fee7a58302dbd511cc49f0cc2b35c112a33a1111fb123cf0be45205ca", size = 1183545 }, + { url = "https://files.pythonhosted.org/packages/72/ca/89d6b6cc86b77fb8fa0d18662e1da4f8a1dada9304c26547fef1b2860336/pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:76390d3d66406cb01b9681c382874400e9dfd77f30ecdea4bd1bf5226dd4aff0", size = 1492993 }, + { url = "https://files.pythonhosted.org/packages/ed/c3/ddc57994e7730a2840941228add6fe6c55d7e3199c9ca8266640cf8d53f8/pyzmq-26.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:4d4c7fe5e50e269f9c63a260638488fec194a73993008618a59b54c47ef6ae72", size = 1392544 }, + { url = "https://files.pythonhosted.org/packages/44/71/c1d407a442179359a7cf437aa4c94b1c0f31233181f05a76370bc4cc7f3c/pyzmq-26.1.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:be3fc2b11c0c384949cf1f01f9a48555039408b0f3e877863b1754225635953e", size = 907001 }, + { url = 
"https://files.pythonhosted.org/packages/79/fc/f550c6ccbf859e266b85a1a8daf3e93ce3a238e05413300c74610bfe9a78/pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48dee75c2a9fa4f4a583d4028d564a0453447ee1277a29b07acc3743c092e259", size = 565754 }, + { url = "https://files.pythonhosted.org/packages/90/e5/eee9c82203d398664db7ed357efe89fb3fb7eb02aa383e052b9aa3e1b2da/pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23f2fe4fb567e8098ebaa7204819658195b10ddd86958a97a6058eed2901eed3", size = 794374 }, + { url = "https://files.pythonhosted.org/packages/25/c5/404cbc8949e1f3ce785f23c7624a3502767f45df04a54b406625473fdb22/pyzmq-26.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:472cacd16f627c06d3c8b2d374345ab74446bae913584a6245e2aa935336d929", size = 752835 }, + { url = "https://files.pythonhosted.org/packages/06/59/aaf876e51d6307da4ffc3e870f699d65f4487913c80e926c05f5d8a30311/pyzmq-26.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8285b25aa20fcc46f1ca4afbc39fd3d5f2fe4c4bbf7f2c7f907a214e87a70024", size = 559602 }, +] + +[[package]] +name = "qdrant-client" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { 
name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/70/5d033afb5a6f467a7cce4426a30a4113d76f6d6192b6ed0148e1847d6568/qdrant_client-1.11.0.tar.gz", hash = "sha256:7c1d4d7a96cfd1ee0cde2a21c607e9df86bcca795ad8d1fd274d295ab64b8458", size = 228713 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/08/6175fe5191e0c4adee7df1416ae361bc0d02d65ca9c1ce397679afa1484a/qdrant_client-1.11.0-py3-none-any.whl", hash = "sha256:1f574ccebb91c0bc8a620c9a41a5a010084fbc4d8c6f1cd0ab7b2eeb97336fc0", size = 258890 }, +] + +[[package]] +name = "redis" +version = "5.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "(python_full_version < '3.11.3' and sys_platform == 'darwin') or (python_full_version < '3.11.3' and sys_platform == 'linux') or (python_full_version < '3.11.3' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/10/defc227d65ea9c2ff5244645870859865cba34da7373477c8376629746ec/redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870", size = 4595651 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/d1/19a9c76811757684a0f74adc25765c8a901d67f9f6472ac9d57c844a23c8/redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4", size = 255608 }, +] + +[package.optional-dependencies] +hiredis = [ + { name = "hiredis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "referencing" +version = "0.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "rpds-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, +] + +[[package]] +name = "regex" +version = "2024.7.24" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/51/64256d0dc72816a4fe3779449627c69ec8fee5a5625fd60ba048f53b3478/regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506", size = 393485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/97/283bd32777e6c30a9bede976cd72ba4b9aa144dc0f0f462bd37fa1a86e01/regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce", size = 470812 }, + { url = "https://files.pythonhosted.org/packages/e4/80/80bc4d7329d04ba519ebcaf26ae21d9e30d33934c458691177c623ceff70/regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024", size = 282129 }, + { url = "https://files.pythonhosted.org/packages/e5/8a/cddcb7942d05ad9a427ad97ab29f1a62c0607ab72bdb2f3a26fc5b07ac0f/regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd", size = 278909 }, + { url = "https://files.pythonhosted.org/packages/a6/d4/93b4011cb83f9a66e0fa398b4d3c6d564d94b686dace676c66502b13dae9/regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53", size = 777687 }, + { url = "https://files.pythonhosted.org/packages/d0/11/d0a12e1cecc1d35bbcbeb99e2ddcb8c1b152b1b58e2ff55f50c3d762b09e/regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca", size = 818982 }, + { url = "https://files.pythonhosted.org/packages/ae/41/01a073765d75427e24710af035d8f0a773b5cedf23f61b63e7ef2ce960d6/regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59", size = 804015 }, + { url = "https://files.pythonhosted.org/packages/3e/66/04b63f31580026c8b819aed7f171149177d10cfab27477ea8800a2268d50/regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41", size = 776517 }, + { url = "https://files.pythonhosted.org/packages/be/49/0c08a7a232e4e26e17afeedf13f331224d9377dde4876ed6e21e4a584a5d/regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5", size = 766860 }, + { url = "https://files.pythonhosted.org/packages/24/44/35769388845cdd7be97e1232a59446b738054b61bc9c92a3b0bacfaf7bb1/regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46", size = 692181 }, + { url = "https://files.pythonhosted.org/packages/50/be/4e09d5bc8de176153f209c95ca4e64b9def1748d693694a95dd4401ee7be/regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f", size = 762956 }, + { url = 
"https://files.pythonhosted.org/packages/90/63/b37152f25fe348aa31806bafa91df607d096e8f477fed9a5cf3de339dd5f/regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7", size = 771978 }, + { url = "https://files.pythonhosted.org/packages/ab/ac/38186431f7c1874e3f790669be933accf1090ee53aba0ab1a811ef38f07e/regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe", size = 840800 }, + { url = "https://files.pythonhosted.org/packages/e8/23/91b04dbf51a2c0ddf5b1e055e9e05ed091ebcf46f2b0e6e3d2fff121f903/regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce", size = 838991 }, + { url = "https://files.pythonhosted.org/packages/36/fd/822110cc14b99bdd7d8c61487bc774f454120cd3d7492935bf13f3399716/regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa", size = 767539 }, + { url = "https://files.pythonhosted.org/packages/82/54/e24a8adfca74f9a421cd47657c51413919e7755e729608de6f4c5556e002/regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66", size = 257712 }, + { url = "https://files.pythonhosted.org/packages/fb/cc/6485c2fc72d0de9b55392246b80921639f1be62bed1e33e982940306b5ba/regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e", size = 269661 }, + { url = "https://files.pythonhosted.org/packages/cb/ec/261f8434a47685d61e59a4ef3d9ce7902af521219f3ebd2194c7adb171a6/regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281", size = 470810 }, + { url = 
"https://files.pythonhosted.org/packages/f0/47/f33b1cac88841f95fff862476a9e875d9a10dae6912a675c6f13c128e5d9/regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b", size = 282126 }, + { url = "https://files.pythonhosted.org/packages/fc/1b/256ca4e2d5041c0aa2f1dc222f04412b796346ab9ce2aa5147405a9457b4/regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a", size = 278920 }, + { url = "https://files.pythonhosted.org/packages/91/03/4603ec057c0bafd2f6f50b0bdda4b12a0ff81022decf1de007b485c356a6/regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73", size = 785420 }, + { url = "https://files.pythonhosted.org/packages/75/f8/13b111fab93e6273e26de2926345e5ecf6ddad1e44c4d419d7b0924f9c52/regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2", size = 828164 }, + { url = "https://files.pythonhosted.org/packages/4a/80/bc3b9d31bd47ff578758af929af0ac1d6169b247e26fa6e87764007f3d93/regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e", size = 812621 }, + { url = "https://files.pythonhosted.org/packages/8b/77/92d4a14530900d46dddc57b728eea65d723cc9fcfd07b96c2c141dabba84/regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51", size = 786609 }, + { url = "https://files.pythonhosted.org/packages/35/58/06695fd8afad4c8ed0a53ec5e222156398b9fe5afd58887ab94ea68e4d16/regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364", size = 775290 }, + { url = "https://files.pythonhosted.org/packages/1b/0f/50b97ee1fc6965744b9e943b5c0f3740792ab54792df73d984510964ef29/regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee", size = 772849 }, + { url = "https://files.pythonhosted.org/packages/8f/64/565ff6cf241586ab7ae76bb4138c4d29bc1d1780973b457c2db30b21809a/regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c", size = 778428 }, + { url = "https://files.pythonhosted.org/packages/e5/fe/4ceabf4382e44e1e096ac46fd5e3bca490738b24157116a48270fd542e88/regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce", size = 849436 }, + { url = "https://files.pythonhosted.org/packages/68/23/1868e40d6b594843fd1a3498ffe75d58674edfc90d95e18dd87865b93bf2/regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1", size = 849484 }, + { url = "https://files.pythonhosted.org/packages/f3/52/bff76de2f6e2bc05edce3abeb7e98e6309aa022fc06071100a0216fbeb50/regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e", size = 776712 }, + { url = "https://files.pythonhosted.org/packages/f2/72/70ade7b0b5fe5c6df38fdfa2a5a8273e3ea6a10b772aa671b7e889e78bae/regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c", size = 257716 }, + { url = "https://files.pythonhosted.org/packages/04/4d/80e04f4e27ab0cbc9096e2d10696da6d9c26a39b60db52670fd57614fea5/regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52", size = 269662 }, + { url = 
"https://files.pythonhosted.org/packages/0f/26/f505782f386ac0399a9237571833f187414882ab6902e2e71a1ecb506835/regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86", size = 471748 }, + { url = "https://files.pythonhosted.org/packages/bb/1d/ea9a21beeb433dbfca31ab82867d69cb67ff8674af9fab6ebd55fa9d3387/regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad", size = 282841 }, + { url = "https://files.pythonhosted.org/packages/9b/f2/c6182095baf0a10169c34e87133a8e73b2e816a80035669b1278e927685e/regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9", size = 279114 }, + { url = "https://files.pythonhosted.org/packages/72/58/b5161bf890b6ca575a25685f19a4a3e3b6f4a072238814f8658123177d84/regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289", size = 789749 }, + { url = "https://files.pythonhosted.org/packages/09/fb/5381b19b62f3a3494266be462f6a015a869cf4bfd8e14d6e7db67e2c8069/regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9", size = 831666 }, + { url = "https://files.pythonhosted.org/packages/3d/6d/2a21c85f970f9be79357d12cf4b97f4fc6bf3bf6b843c39dabbc4e5f1181/regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c", size = 817544 }, + { url = "https://files.pythonhosted.org/packages/f9/ae/5f23e64f6cf170614237c654f3501a912dfb8549143d4b91d1cd13dba319/regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440", size = 790854 }, + { url = 
"https://files.pythonhosted.org/packages/29/0a/d04baad1bbc49cdfb4aef90c4fc875a60aaf96d35a1616f1dfe8149716bc/regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610", size = 779242 }, + { url = "https://files.pythonhosted.org/packages/3a/27/b242a962f650c3213da4596d70e24c7c1c46e3aa0f79f2a81164291085f8/regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5", size = 776932 }, + { url = "https://files.pythonhosted.org/packages/9c/ae/de659bdfff80ad2c0b577a43dd89dbc43870a4fc4bbf604e452196758e83/regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799", size = 784521 }, + { url = "https://files.pythonhosted.org/packages/d4/ac/eb6a796da0bdefbf09644a7868309423b18d344cf49963a9d36c13502d46/regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05", size = 854548 }, + { url = "https://files.pythonhosted.org/packages/56/77/fde8d825dec69e70256e0925af6c81eea9acf0a634d3d80f619d8dcd6888/regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94", size = 853345 }, + { url = "https://files.pythonhosted.org/packages/ff/04/2b79ad0bb9bc05ab4386caa2c19aa047a66afcbdfc2640618ffc729841e4/regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38", size = 781414 }, + { url = "https://files.pythonhosted.org/packages/bf/71/d0af58199283ada7d25b20e416f5b155f50aad99b0e791c0966ff5a1cd00/regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc", size = 258125 }, + { url = 
"https://files.pythonhosted.org/packages/95/b3/10e875c45c60b010b66fc109b899c6fc4f05d485fe1d54abff98ce791124/regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908", size = 269162 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "charset-normalizer", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, +] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490 }, +] + +[[package]] +name = "rich" +version = "13.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/01/c954e134dc440ab5f96952fe52b4fdc64225530320a910473c1fe270d9aa/rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432", size = 221248 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", size = 240681 }, +] + +[[package]] +name = "rpds-py" +version = "0.20.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/55/64/b693f262791b818880d17268f3f8181ef799b0d187f6f731b1772e05a29a/rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121", size = 25814 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/2d/a7e60483b72b91909e18f29a5c5ae847bac4e2ae95b77bb77e1f41819a58/rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2", size = 318432 }, + { url = "https://files.pythonhosted.org/packages/b5/b4/f15b0c55a6d880ce74170e7e28c3ed6c5acdbbd118df50b91d1dabf86008/rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f", size = 311333 }, + { url = "https://files.pythonhosted.org/packages/36/10/3f4e490fe6eb069c07c22357d0b4804cd94cb9f8d01345ef9b1d93482b9d/rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150", size = 366697 }, + { url = "https://files.pythonhosted.org/packages/f5/c8/cd6ab31b4424c7fab3b17e153b6ea7d1bb0d7cabea5c1ef683cc8adb8bc2/rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e", size = 368386 }, + { url = "https://files.pythonhosted.org/packages/60/5e/642a44fda6dda90b5237af7a0ef1d088159c30a504852b94b0396eb62125/rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2", size = 395374 }, + { url = "https://files.pythonhosted.org/packages/7c/b5/ff18c093c9e72630f6d6242e5ccb0728ef8265ba0a154b5972f89d23790a/rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3", size = 433189 }, + 
{ url = "https://files.pythonhosted.org/packages/4a/6d/1166a157b227f2333f8e8ae320b6b7ea2a6a38fbe7a3563ad76dffc8608d/rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf", size = 354849 }, + { url = "https://files.pythonhosted.org/packages/70/a4/70ea49863ea09ae4c2971f2eef58e80b757e3c0f2f618c5815bb751f7847/rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140", size = 373233 }, + { url = "https://files.pythonhosted.org/packages/3b/d3/822a28152a1e7e2ba0dc5d06cf8736f4cd64b191bb6ec47fb51d1c3c5ccf/rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f", size = 541852 }, + { url = "https://files.pythonhosted.org/packages/c6/a5/6ef91e4425dc8b3445ff77d292fc4c5e37046462434a0423c4e0a596a8bd/rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce", size = 547630 }, + { url = "https://files.pythonhosted.org/packages/72/f8/d5625ee05c4e5c478954a16d9359069c66fe8ac8cd5ddf28f80d3b321837/rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94", size = 525766 }, + { url = "https://files.pythonhosted.org/packages/94/3c/1ff1ed6ae323b3e16fdfcdae0f0a67f373a6c3d991229dc32b499edeffb7/rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee", size = 199174 }, + { url = "https://files.pythonhosted.org/packages/ec/ba/5762c0aee2403dfea14ed74b0f8a2415cfdbb21cf745d600d9a8ac952c5b/rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399", size = 213543 }, + { url = 
"https://files.pythonhosted.org/packages/ab/2a/191374c52d7be0b056cc2a04d718d2244c152f915d4a8d2db2aacc526189/rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489", size = 318369 }, + { url = "https://files.pythonhosted.org/packages/0e/6a/2c9fdcc6d235ac0d61ec4fd9981184689c3e682abd05e3caa49bccb9c298/rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318", size = 311303 }, + { url = "https://files.pythonhosted.org/packages/d2/b2/725487d29633f64ef8f9cbf4729111a0b61702c8f8e94db1653930f52cce/rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db", size = 366424 }, + { url = "https://files.pythonhosted.org/packages/7a/8c/668195ab9226d01b7cf7cd9e59c1c0be1df05d602df7ec0cf46f857dcf59/rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5", size = 368359 }, + { url = "https://files.pythonhosted.org/packages/52/28/356f6a39c1adeb02cf3e5dd526f5e8e54e17899bef045397abcfbf50dffa/rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5", size = 394886 }, + { url = "https://files.pythonhosted.org/packages/a2/65/640fb1a89080a8fb6f4bebd3dafb65a2edba82e2e44c33e6eb0f3e7956f1/rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6", size = 432416 }, + { url = "https://files.pythonhosted.org/packages/a7/e8/85835077b782555d6b3416874b702ea6ebd7db1f145283c9252968670dd5/rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209", size = 354819 }, + { 
url = "https://files.pythonhosted.org/packages/4f/87/1ac631e923d65cbf36fbcfc6eaa702a169496de1311e54be142f178e53ee/rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3", size = 373282 }, + { url = "https://files.pythonhosted.org/packages/e4/ce/cb316f7970189e217b998191c7cf0da2ede3d5437932c86a7210dc1e9994/rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272", size = 541540 }, + { url = "https://files.pythonhosted.org/packages/90/d7/4112d7655ec8aff168ecc91d4ceb51c557336edde7e6ccf6463691a2f253/rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad", size = 547640 }, + { url = "https://files.pythonhosted.org/packages/ab/44/4f61d64dfed98cc71623f3a7fcb612df636a208b4b2c6611eaa985e130a9/rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58", size = 525555 }, + { url = "https://files.pythonhosted.org/packages/35/f2/a862d81eacb21f340d584cd1c749c289979f9a60e9229f78bffc0418a199/rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0", size = 199338 }, + { url = "https://files.pythonhosted.org/packages/cc/ec/77d0674f9af4872919f3738018558dd9d37ad3f7ad792d062eadd4af7cba/rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c", size = 213585 }, + { url = "https://files.pythonhosted.org/packages/89/b7/f9682c5cc37fcc035f4a0fc33c1fe92ec9cbfdee0cdfd071cf948f53e0df/rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6", size = 321468 }, + { url = 
"https://files.pythonhosted.org/packages/b8/ad/fc82be4eaceb8d444cb6fc1956ce972b3a0795104279de05e0e4131d0a47/rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b", size = 313062 }, + { url = "https://files.pythonhosted.org/packages/0e/1c/6039e80b13a08569a304dc13476dc986352dca4598e909384db043b4e2bb/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739", size = 370168 }, + { url = "https://files.pythonhosted.org/packages/dc/c9/5b9aa35acfb58946b4b785bc8e700ac313669e02fb100f3efa6176a83e81/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c", size = 371376 }, + { url = "https://files.pythonhosted.org/packages/7b/dd/0e0dbeb70d8a5357d2814764d467ded98d81d90d3570de4fb05ec7224f6b/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee", size = 397200 }, + { url = "https://files.pythonhosted.org/packages/e4/da/a47d931eb688ccfd77a7389e45935c79c41e8098d984d87335004baccb1d/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96", size = 426824 }, + { url = "https://files.pythonhosted.org/packages/0f/f7/a59a673594e6c2ff2dbc44b00fd4ecdec2fc399bb6a7bd82d612699a0121/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4", size = 357967 }, + { url = "https://files.pythonhosted.org/packages/5f/61/3ba1905396b2cb7088f9503a460b87da33452da54d478cb9241f6ad16d00/rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef", size = 
378905 }, + { url = "https://files.pythonhosted.org/packages/08/31/6d0df9356b4edb0a3a077f1ef714e25ad21f9f5382fc490c2383691885ea/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821", size = 546348 }, + { url = "https://files.pythonhosted.org/packages/ae/15/d33c021de5cb793101df9961c3c746dfc476953dbbf5db337d8010dffd4e/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940", size = 553152 }, + { url = "https://files.pythonhosted.org/packages/70/2d/5536d28c507a4679179ab15aa0049440e4d3dd6752050fa0843ed11e9354/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174", size = 528807 }, + { url = "https://files.pythonhosted.org/packages/e3/62/7ebe6ec0d3dd6130921f8cffb7e34afb7f71b3819aa0446a24c5e81245ec/rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139", size = 200993 }, + { url = "https://files.pythonhosted.org/packages/ec/2f/b938864d66b86a6e4acadefdc56de75ef56f7cafdfd568a6464605457bd5/rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585", size = 214458 }, + { url = "https://files.pythonhosted.org/packages/99/32/43b919a0a423c270a838ac2726b1c7168b946f2563fd99a51aaa9692d00f/rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29", size = 321465 }, + { url = "https://files.pythonhosted.org/packages/58/a9/c4d899cb28e9e47b0ff12462e8f827381f243176036f17bef9c1604667f2/rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91", size = 312900 }, + { url = 
"https://files.pythonhosted.org/packages/8f/90/9e51670575b5dfaa8c823369ef7d943087bfb73d4f124a99ad6ef19a2b26/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24", size = 370973 }, + { url = "https://files.pythonhosted.org/packages/fc/c1/523f2a03f853fc0d4c1acbef161747e9ab7df0a8abf6236106e333540921/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7", size = 370890 }, + { url = "https://files.pythonhosted.org/packages/51/ca/2458a771f16b0931de4d384decbe43016710bc948036c8f4562d6e063437/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9", size = 397174 }, + { url = "https://files.pythonhosted.org/packages/00/7d/6e06807f6305ea2408b364efb0eef83a6e21b5e7b5267ad6b473b9a7e416/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8", size = 426449 }, + { url = "https://files.pythonhosted.org/packages/8c/d1/6c9e65260a819a1714510a7d69ac1d68aa23ee9ce8a2d9da12187263c8fc/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879", size = 357698 }, + { url = "https://files.pythonhosted.org/packages/5d/fb/ecea8b5286d2f03eec922be7173a03ed17278944f7c124348f535116db15/rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f", size = 378530 }, + { url = "https://files.pythonhosted.org/packages/e3/e3/ac72f858957f52a109c588589b73bd2fad4a0fc82387fb55fb34aeb0f9cd/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c", 
size = 545753 }, + { url = "https://files.pythonhosted.org/packages/b2/a4/a27683b519d5fc98e4390a3b130117d80fd475c67aeda8aac83c0e8e326a/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2", size = 552443 }, + { url = "https://files.pythonhosted.org/packages/a1/ed/c074d248409b4432b1ccb2056974175fa0af2d1bc1f9c21121f80a358fa3/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57", size = 528380 }, + { url = "https://files.pythonhosted.org/packages/d5/bd/04caf938895d2d78201e89c0c8a94dfd9990c34a19ff52fb01d0912343e3/rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a", size = 200540 }, + { url = "https://files.pythonhosted.org/packages/95/cc/109eb8b9863680411ae703664abacaa035820c7755acc9686d5dd02cdd2e/rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2", size = 214111 }, + { url = "https://files.pythonhosted.org/packages/06/39/bf1f664c347c946ef56cecaa896e3693d91acc741afa78ebb3fdb7aba08b/rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045", size = 319444 }, + { url = "https://files.pythonhosted.org/packages/c1/71/876135d3cb90d62468540b84e8e83ff4dc92052ab309bfdea7ea0b9221ad/rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc", size = 311699 }, + { url = "https://files.pythonhosted.org/packages/f7/da/8ccaeba6a3dda7467aebaf893de9eafd56275e2c90773c83bf15fb0b8374/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02", size = 367825 }, + { url = 
"https://files.pythonhosted.org/packages/04/b6/02a54c47c178d180395b3c9a8bfb3b93906e08f9acf7b4a1067d27c3fae0/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92", size = 369046 }, + { url = "https://files.pythonhosted.org/packages/a7/64/df4966743aa4def8727dc13d06527c8b13eb7412c1429def2d4701bee520/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d", size = 395896 }, + { url = "https://files.pythonhosted.org/packages/6f/d9/7ff03ff3642c600f27ff94512bb158a8d815fea5ed4162c75a7e850d6003/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855", size = 432427 }, + { url = "https://files.pythonhosted.org/packages/b8/c6/e1b886f7277b3454e55e85332e165091c19114eecb5377b88d892fd36ccf/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511", size = 355403 }, + { url = "https://files.pythonhosted.org/packages/e2/62/e26bd5b944e547c7bfd0b6ca7e306bfa430f8bd298ab72a1217976a7ca8d/rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51", size = 374491 }, + { url = "https://files.pythonhosted.org/packages/c3/92/93c5a530898d3a5d1ce087455071ba714b77806ed9ffee4070d0c7a53b7e/rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075", size = 543622 }, + { url = "https://files.pythonhosted.org/packages/01/9e/d68fba289625b5d3c9d1925825d7da716fbf812bda2133ac409021d5db13/rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60", size = 548558 }, + { url = "https://files.pythonhosted.org/packages/bf/d6/4b2fad4898154365f0f2bd72ffd190349274a4c1d6a6f94f02a83bb2b8f1/rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344", size = 525753 }, + { url = "https://files.pythonhosted.org/packages/d2/ea/6f121d1802f3adae1981aea4209ea66f9d3c7f2f6d6b85ef4f13a61d17ef/rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989", size = 213529 }, +] + +[[package]] +name = "rsa" +version = "4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/65/7d973b89c4d2351d7fb232c2e452547ddfa243e93131e7cfa766da627b52/rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21", size = 29711 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/97/fa78e3d2f65c02c8e1268b9aba606569fe97f6c8f7c2d74394553347c145/rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", size = 34315 }, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ruamel-yaml-clib", marker = "(platform_python_implementation == 'CPython' and sys_platform == 'darwin') or (platform_python_implementation == 'CPython' and sys_platform == 'linux') or (platform_python_implementation == 'CPython' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/81/4dfc17eb6ebb1aac314a3eb863c1325b907863a1b8b1382cdffcb6ac0ed9/ruamel.yaml-0.18.6.tar.gz", hash = 
"sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b", size = 143362 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/67/8ece580cc363331d9a53055130f86b096bf16e38156e33b1d3014fffda6b/ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636", size = 117761 }, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/ab/bab9eb1566cd16f060b54055dd39cf6a34bfa0240c53a7218c43e974295b/ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512", size = 213824 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/01/37ac131614f71b98e9b148b2d7790662dcee92217d2fb4bac1aa377def33/ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d", size = 148236 }, + { url = "https://files.pythonhosted.org/packages/61/ee/4874c9fc96010fce85abefdcbe770650c5324288e988d7a48b527a423815/ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462", size = 133996 }, + { url = "https://files.pythonhosted.org/packages/d3/62/c60b034d9a008bbd566eeecf53a5a4c73d191c8de261290db6761802b72d/ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412", size = 526680 }, + { url = "https://files.pythonhosted.org/packages/90/8c/6cdb44f548b29eb6328b9e7e175696336bc856de2ff82e5776f860f03822/ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f", size = 605853 }, + { url = 
"https://files.pythonhosted.org/packages/88/30/fc45b45d5eaf2ff36cffd215a2f85e9b90ac04e70b97fd4097017abfb567/ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334", size = 655206 }, + { url = "https://files.pythonhosted.org/packages/af/dc/133547f90f744a0c827bac5411d84d4e81da640deb3af1459e38c5f3b6a0/ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d", size = 689649 }, + { url = "https://files.pythonhosted.org/packages/23/1d/589139191b187a3c750ae8d983c42fd799246d5f0dd84451a0575c9bdbe9/ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d", size = 100044 }, + { url = "https://files.pythonhosted.org/packages/4f/5b/744df20285a75ac4c606452ce9a0fcc42087d122f42294518ded1017697c/ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31", size = 117825 }, + { url = "https://files.pythonhosted.org/packages/b1/15/971b385c098e8d0d170893f5ba558452bb7b776a0c90658b8f4dd0e3382b/ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069", size = 148870 }, + { url = "https://files.pythonhosted.org/packages/01/b0/4ddef56e9f703d7909febc3a421d709a3482cda25826816ec595b73e3847/ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248", size = 134475 }, + { url = "https://files.pythonhosted.org/packages/a4/f7/22d6b620ed895a05d40802d8281eff924dc6190f682d933d4efff60db3b5/ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b", size = 544020 }, + { url = 
"https://files.pythonhosted.org/packages/7c/e4/0d19d65e340f93df1c47f323d95fa4b256bb28320290f5fddef90837853a/ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe", size = 642643 }, + { url = "https://files.pythonhosted.org/packages/c9/ff/f781eb5e2ae011e586d5426e2086a011cf1e0f59704a6cad1387975c5a62/ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899", size = 695832 }, + { url = "https://files.pythonhosted.org/packages/e3/41/f62e67ac651358b8f0d60cfb12ab2daf99b1b69eeaa188d0cec809d943a6/ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9", size = 730923 }, + { url = "https://files.pythonhosted.org/packages/9f/f0/19ab8acbf983cd1b37f47d27ceb8b10a738d60d36316a54bad57e0d73fbb/ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7", size = 99999 }, + { url = "https://files.pythonhosted.org/packages/ec/54/d8a795997921d87224c65d44499ca595a833093fb215b133f920c1062956/ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb", size = 118008 }, + { url = "https://files.pythonhosted.org/packages/7a/a2/eb5e9d088cb9d15c24d956944c09dca0a89108ad6e2e913c099ef36e3f0d/ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1", size = 144636 }, + { url = "https://files.pythonhosted.org/packages/66/98/8de4f22bbfd9135deb3422e96d450c4bc0a57d38c25976119307d2efe0aa/ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2", size = 135684 }, + { url = 
"https://files.pythonhosted.org/packages/30/d3/5fe978cd01a61c12efd24d65fa68c6f28f28c8073a06cf11db3a854390ca/ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92", size = 734571 }, + { url = "https://files.pythonhosted.org/packages/55/b3/e2531a050758b717c969cbf76c103b75d8a01e11af931b94ba656117fbe9/ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62", size = 643946 }, + { url = "https://files.pythonhosted.org/packages/0d/aa/06db7ca0995b513538402e11280282c615b5ae5f09eb820460d35fb69715/ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9", size = 692169 }, + { url = "https://files.pythonhosted.org/packages/27/38/4cf4d482b84ecdf51efae6635cc5483a83cf5ca9d9c13e205a750e251696/ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d", size = 740325 }, + { url = "https://files.pythonhosted.org/packages/6f/67/c62c6eea53a4feb042727a3d6c18f50dc99683c2b199c06bd2a9e3db8e22/ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa", size = 98639 }, + { url = "https://files.pythonhosted.org/packages/10/d2/52a3d810d0b5b3720725c0504a27b3fced7b6f310fe928f7019d79387bc1/ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b", size = 115305 }, +] + +[[package]] +name = "ruff" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/7e/82271b5ecbb72f24178eac28979380c4ba234f90be5cf92cb513605efb1a/ruff-0.6.1.tar.gz", hash = 
"sha256:af3ffd8c6563acb8848d33cd19a69b9bfe943667f0419ca083f8ebe4224a3436", size = 2457325 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/d1/ac5091efcc8e2cdc55733ac07f17f961465318a3fa8916e44360e32e6c73/ruff-0.6.1-py3-none-linux_armv6l.whl", hash = "sha256:b4bb7de6a24169dc023f992718a9417380301b0c2da0fe85919f47264fb8add9", size = 9610279 }, + { url = "https://files.pythonhosted.org/packages/2b/ed/c3e1c20e46f5619f133e1ddafbb1a957407ea36d42a477d0d88e9897bed9/ruff-0.6.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:45efaae53b360c81043e311cdec8a7696420b3d3e8935202c2846e7a97d4edae", size = 8719541 }, + { url = "https://files.pythonhosted.org/packages/13/49/3ee1c8dca59a8bd87ca833871d86304bce4348b2e019287e45ca0ad5b3dd/ruff-0.6.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bc60c7d71b732c8fa73cf995efc0c836a2fd8b9810e115be8babb24ae87e0850", size = 8320291 }, + { url = "https://files.pythonhosted.org/packages/2a/44/1fec4c3eac790a445f3b9e0759665439c1d88517851f3fca90e32e897d48/ruff-0.6.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c7477c3b9da822e2db0b4e0b59e61b8a23e87886e727b327e7dcaf06213c5cf", size = 10040885 }, + { url = "https://files.pythonhosted.org/packages/86/98/c0b96dda4f751accecd3c0638d8c617a3b3e6de11b4e68aa77cae72912fb/ruff-0.6.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3a0af7ab3f86e3dc9f157a928e08e26c4b40707d0612b01cd577cc84b8905cc9", size = 9414183 }, + { url = "https://files.pythonhosted.org/packages/a0/e3/59ac3b2fb4e80f53a96f2c22951589357e22ef3bc2c2b04b2a73772663f8/ruff-0.6.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392688dbb50fecf1bf7126731c90c11a9df1c3a4cdc3f481b53e851da5634fa5", size = 10203467 }, + { url = "https://files.pythonhosted.org/packages/8d/02/3dc1c33877d68341b9764b30e2dcc9209b6adb8a0a41ca04d503dc39006e/ruff-0.6.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:5278d3e095ccc8c30430bcc9bc550f778790acc211865520f3041910a28d0024", size = 10962198 }, + { url = "https://files.pythonhosted.org/packages/c5/1f/a36bb06c8b724e3a8ee59124657414182227a353a98408cb5321aa87bd13/ruff-0.6.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe6d5f65d6f276ee7a0fc50a0cecaccb362d30ef98a110f99cac1c7872df2f18", size = 10537682 }, + { url = "https://files.pythonhosted.org/packages/a2/bd/479fbfab1634f2527a3f5ddb44973977f75ffbdf3d9bb16748c558a263ad/ruff-0.6.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2e0dd11e2ae553ee5c92a81731d88a9883af8db7408db47fc81887c1f8b672e", size = 11505616 }, + { url = "https://files.pythonhosted.org/packages/e0/94/92bc24e7e58d2f90fa2a370f763d25d9e06ccccfab839b88e389d79fb4e3/ruff-0.6.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d812615525a34ecfc07fd93f906ef5b93656be01dfae9a819e31caa6cfe758a1", size = 10221898 }, + { url = "https://files.pythonhosted.org/packages/f7/47/1aca18f02abd4a3ba739991b719a3aa5d8e39e0bee1a91090c8bfacdcd13/ruff-0.6.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faaa4060f4064c3b7aaaa27328080c932fa142786f8142aff095b42b6a2eb631", size = 10033784 }, + { url = "https://files.pythonhosted.org/packages/e6/48/df16d9b00af42034ee85915914783bc0529a2ff709d6d3ef39c7c15d826d/ruff-0.6.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99d7ae0df47c62729d58765c593ea54c2546d5de213f2af2a19442d50a10cec9", size = 9477381 }, + { url = "https://files.pythonhosted.org/packages/46/d6/d6eadedcc9f9c4927665eee26f4449c15f4c501e7ba9c34c37753748dc11/ruff-0.6.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9eb18dfd7b613eec000e3738b3f0e4398bf0153cb80bfa3e351b3c1c2f6d7b15", size = 9862269 }, + { url = "https://files.pythonhosted.org/packages/4e/30/e2f5b06ac048898a1cac190e1c9c0d88f984596b27f1069341217e42d119/ruff-0.6.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c62bc04c6723a81e25e71715aa59489f15034d69bf641df88cb38bdc32fd1dbb", 
size = 10287591 }, + { url = "https://files.pythonhosted.org/packages/a0/2c/6a17be1b3c69c03167e5b3d69317ae9b8b2a06091189161751e7a36afef5/ruff-0.6.1-py3-none-win32.whl", hash = "sha256:9fb4c4e8b83f19c9477a8745e56d2eeef07a7ff50b68a6998f7d9e2e3887bdc4", size = 7918031 }, + { url = "https://files.pythonhosted.org/packages/2e/ba/66a6c87f6532e0390ebc67d5ae9bc1064f4e14d1b0e224bdedc999ae2b15/ruff-0.6.1-py3-none-win_amd64.whl", hash = "sha256:c2ebfc8f51ef4aca05dad4552bbcf6fe8d1f75b2f6af546cc47cc1c1ca916b5b", size = 8736178 }, + { url = "https://files.pythonhosted.org/packages/14/da/418c5d40058ad56bd0fa060efa4580ccf446f916167aa6540d31f6844e16/ruff-0.6.1-py3-none-win_arm64.whl", hash = "sha256:3bc81074971b0ffad1bd0c52284b22411f02a11a012082a76ac6da153536e014", size = 8142791 }, +] + +[[package]] +name = "safetensors" +version = "0.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/41/5b/0e63bf736e171463481c5ea3406650dc25aa044083062d321820e7a1ef9f/safetensors-0.4.4.tar.gz", hash = "sha256:5fe3e9b705250d0172ed4e100a811543108653fb2b66b9e702a088ad03772a07", size = 69522 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/fa/bd12d51c70986156031c25eae2d092ad8ef8b5cadb4e684a78b620b28320/safetensors-0.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2adb497ada13097f30e386e88c959c0fda855a5f6f98845710f5bb2c57e14f12", size = 392399 }, + { url = "https://files.pythonhosted.org/packages/b7/1e/f146555161e21918e00726b2bff1e2517faa8b2953e53a5a45c5f5bef64e/safetensors-0.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7db7fdc2d71fd1444d85ca3f3d682ba2df7d61a637dfc6d80793f439eae264ab", size = 381919 }, + { url = "https://files.pythonhosted.org/packages/fb/f7/0c97595790f03ff86505c375cddf3a26b6d645ff2cbc819936287a66a744/safetensors-0.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4f0eed76b430f009fbefca1a0028ddb112891b03cb556d7440d5cd68eb89a9", size = 441235 }, + { url = 
"https://files.pythonhosted.org/packages/77/8b/0d1e055536f1c0ac137d446806d50d9d952bed85688d733a81913cf09367/safetensors-0.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d216fab0b5c432aabf7170883d7c11671622bde8bd1436c46d633163a703f6", size = 440000 }, + { url = "https://files.pythonhosted.org/packages/bd/85/3a73b4ff7a46dd7606f924ededc31468fd385221670d840005b8dbdb7a37/safetensors-0.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d9b76322e49c056bcc819f8bdca37a2daa5a6d42c07f30927b501088db03309", size = 477919 }, + { url = "https://files.pythonhosted.org/packages/dd/41/b832227d04a8b65b32e2be13dbe8212db0135514380148c9b81c1b08c023/safetensors-0.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32f0d1f6243e90ee43bc6ee3e8c30ac5b09ca63f5dd35dbc985a1fc5208c451a", size = 496838 }, + { url = "https://files.pythonhosted.org/packages/18/f3/27bf4d7112b194eea2d8401706953080692d37ace1b74b36fcc7234961cd/safetensors-0.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d464bdc384874601a177375028012a5f177f1505279f9456fea84bbc575c7f", size = 435539 }, + { url = "https://files.pythonhosted.org/packages/b1/98/d75bbdaca03d571e5e5e1ef600f3015cd5f9884126eb53a3377b4111fea1/safetensors-0.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63144e36209ad8e4e65384dbf2d52dd5b1866986079c00a72335402a38aacdc5", size = 457051 }, + { url = "https://files.pythonhosted.org/packages/03/e1/b7849306e47234ef548c2b32e65f2ffee0640bfad8c65e4dd37b6fee981c/safetensors-0.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:051d5ecd490af7245258000304b812825974d5e56f14a3ff7e1b8b2ba6dc2ed4", size = 619613 }, + { url = "https://files.pythonhosted.org/packages/e9/d9/cbf1316161d0a1b4b0aceeb16ddb396f49363133618cc062e4abd66b2ea9/safetensors-0.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51bc8429d9376224cd3cf7e8ce4f208b4c930cd10e515b6ac6a72cbc3370f0d9", size 
= 605422 }, + { url = "https://files.pythonhosted.org/packages/48/47/16ece1369794b9d3bc057a42fed0601779d21f57d0b0b1b671a78410d74d/safetensors-0.4.4-cp310-none-win32.whl", hash = "sha256:fb7b54830cee8cf9923d969e2df87ce20e625b1af2fd194222ab902d3adcc29c", size = 272398 }, + { url = "https://files.pythonhosted.org/packages/b4/a9/f28d4a8a082ef513755a1a2393a924999892142ed235aed57ab558cd1bc9/safetensors-0.4.4-cp310-none-win_amd64.whl", hash = "sha256:4b3e8aa8226d6560de8c2b9d5ff8555ea482599c670610758afdc97f3e021e9c", size = 285884 }, + { url = "https://files.pythonhosted.org/packages/0f/1b/27cea7a581019d0d674284048ff76e3a6e048bc3ae3c31cb0bfc93641180/safetensors-0.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bbaa31f2cb49013818bde319232ccd72da62ee40f7d2aa532083eda5664e85ff", size = 392373 }, + { url = "https://files.pythonhosted.org/packages/36/46/93c39c96188a88ca15d12759bb51f52ce7365f6fd19ef09580bc096e8860/safetensors-0.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fdcb80f4e9fbb33b58e9bf95e7dbbedff505d1bcd1c05f7c7ce883632710006", size = 381488 }, + { url = "https://files.pythonhosted.org/packages/37/a2/93cab60b8e2c8ea6343a04cdd2c09c860c9640eaaffbf8b771a0e8f98e7d/safetensors-0.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55c14c20be247b8a1aeaf3ab4476265e3ca83096bb8e09bb1a7aa806088def4f", size = 441025 }, + { url = "https://files.pythonhosted.org/packages/19/37/2a5220dce5eff841328bfc3071f4a7063f3eb12341893b2688669fc67115/safetensors-0.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:949aaa1118660f992dbf0968487b3e3cfdad67f948658ab08c6b5762e90cc8b6", size = 439791 }, + { url = "https://files.pythonhosted.org/packages/f8/93/1d894ff44df26baf4c2471a5874388361390d3cb1cc4811cff40fc01373e/safetensors-0.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c11a4ab7debc456326a2bac67f35ee0ac792bcf812c7562a4a28559a5c795e27", size = 477752 }, + { url = 
"https://files.pythonhosted.org/packages/a5/17/b697f517c7ffb8d62d1ef17c6224c00edbb96b931e565d887476a51ac803/safetensors-0.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0cea44bba5c5601b297bc8307e4075535b95163402e4906b2e9b82788a2a6df", size = 496019 }, + { url = "https://files.pythonhosted.org/packages/af/b9/c33f69f4dad9c65209efb76c2be6968af5219e31ccfd344a0025d972252f/safetensors-0.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9d752c97f6bbe327352f76e5b86442d776abc789249fc5e72eacb49e6916482", size = 435416 }, + { url = "https://files.pythonhosted.org/packages/71/59/f6480a68df2f4fb5aefae45a800d9bc043c0549210075275fef190a896ce/safetensors-0.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03f2bb92e61b055ef6cc22883ad1ae898010a95730fa988c60a23800eb742c2c", size = 456771 }, + { url = "https://files.pythonhosted.org/packages/09/01/2a7507cdf7318fb68596e6537ef81e83cfc171c483b4a786b9c947368e19/safetensors-0.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:87bf3f91a9328a941acc44eceffd4e1f5f89b030985b2966637e582157173b98", size = 619456 }, + { url = "https://files.pythonhosted.org/packages/80/b3/4bb5b1fb025cb8c81fe8a76371334860a9c276fade616f83fd53feef2740/safetensors-0.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:20d218ec2b6899d29d6895419a58b6e44cc5ff8f0cc29fac8d236a8978ab702e", size = 605125 }, + { url = "https://files.pythonhosted.org/packages/09/93/0d6d54b84eff8361dc257fa306ae0ef1899025a2d9657efe8384ac8b7267/safetensors-0.4.4-cp311-none-win32.whl", hash = "sha256:8079486118919f600c603536e2490ca37b3dbd3280e3ad6eaacfe6264605ac8a", size = 272273 }, + { url = "https://files.pythonhosted.org/packages/21/4f/5ee44681c7ea827f9d3c104ca429865b41c05a4163eff7f0599152c2e682/safetensors-0.4.4-cp311-none-win_amd64.whl", hash = "sha256:2f8c2eb0615e2e64ee27d478c7c13f51e5329d7972d9e15528d3e4cfc4a08f0d", size = 285982 }, + { url = 
"https://files.pythonhosted.org/packages/e2/41/a491dbe3fc1c195ce648939a87d3b4b3800eaade2f05278a6dc02b575c51/safetensors-0.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baec5675944b4a47749c93c01c73d826ef7d42d36ba8d0dba36336fa80c76426", size = 391372 }, + { url = "https://files.pythonhosted.org/packages/3a/a1/d99aa8d10fa8d82276ee2aaa87afd0a6b96e69c128eaa9f93524b52c5276/safetensors-0.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f15117b96866401825f3e94543145028a2947d19974429246ce59403f49e77c6", size = 381800 }, + { url = "https://files.pythonhosted.org/packages/c8/1c/4fa05b79afdd4688a357a42433565b5b09137af6b4f6cd0c9e371466e2f1/safetensors-0.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a13a9caea485df164c51be4eb0c87f97f790b7c3213d635eba2314d959fe929", size = 440817 }, + { url = "https://files.pythonhosted.org/packages/65/c0/152b059debd3cee4f44b7df972e915a38f776379ea99ce4a3cbea3f78dbd/safetensors-0.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b54bc4ca5f9b9bba8cd4fb91c24b2446a86b5ae7f8975cf3b7a277353c3127c", size = 439483 }, + { url = "https://files.pythonhosted.org/packages/9c/93/20c05daeecf6fa93b9403c3660df1d983d7ddd5cdb3e3710ff41b72754dd/safetensors-0.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08332c22e03b651c8eb7bf5fc2de90044f3672f43403b3d9ac7e7e0f4f76495e", size = 476631 }, + { url = "https://files.pythonhosted.org/packages/84/2f/bfe3e54b7dbcaef3f10b8f3c71146790ab18b0bd79ad9ca2bc2c950b68df/safetensors-0.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb62841e839ee992c37bb75e75891c7f4904e772db3691c59daaca5b4ab960e1", size = 493575 }, + { url = "https://files.pythonhosted.org/packages/1b/0b/2a1b405131f26b95acdb3ed6c8e3a8c84de72d364fd26202d43e68ec4bad/safetensors-0.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5b927acc5f2f59547270b0309a46d983edc44be64e1ca27a7fcb0474d6cd67", 
size = 434891 }, + { url = "https://files.pythonhosted.org/packages/31/ce/cad390a08128ebcb74be79a1e03c496a4773059b2541c6a97a52fd1705fb/safetensors-0.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a69c71b1ae98a8021a09a0b43363b0143b0ce74e7c0e83cacba691b62655fb8", size = 457631 }, + { url = "https://files.pythonhosted.org/packages/9f/83/d9d6e6a45d624c27155f4336af8e7b2bcde346137f6460dcd5e1bcdc2e3f/safetensors-0.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23654ad162c02a5636f0cd520a0310902c4421aab1d91a0b667722a4937cc445", size = 619367 }, + { url = "https://files.pythonhosted.org/packages/9f/20/b37e1ae87cb83a1c2fe5cf0710bab12d6f186474cbbdda4fda2d7d57d225/safetensors-0.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0677c109d949cf53756859160b955b2e75b0eefe952189c184d7be30ecf7e858", size = 605302 }, + { url = "https://files.pythonhosted.org/packages/99/5a/9237f1d0adba5eec3711d7c1911b3111631a86779d692fe8ad2cd709d6a4/safetensors-0.4.4-cp312-none-win32.whl", hash = "sha256:a51d0ddd4deb8871c6de15a772ef40b3dbd26a3c0451bb9e66bc76fc5a784e5b", size = 273434 }, + { url = "https://files.pythonhosted.org/packages/b9/dd/b11f3a33fe7b6c94fde08b3de094b93d3438d67922ef90bcb5002e306e0b/safetensors-0.4.4-cp312-none-win_amd64.whl", hash = "sha256:2d065059e75a798bc1933c293b68d04d79b586bb7f8c921e0ca1e82759d0dbb1", size = 286347 }, + { url = "https://files.pythonhosted.org/packages/b3/d6/7a4db869a295b57066e1399eb467c38df86439d3766c850ca8eb75b5e3a3/safetensors-0.4.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9d625692578dd40a112df30c02a1adf068027566abd8e6a74893bb13d441c150", size = 391373 }, + { url = "https://files.pythonhosted.org/packages/1e/97/de856ad42ef65822ff982e7af7fc889cd717240672b45c647af7ea05c631/safetensors-0.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7cabcf39c81e5b988d0adefdaea2eb9b4fd9bd62d5ed6559988c62f36bfa9a89", size = 382523 }, + { url = 
"https://files.pythonhosted.org/packages/07/d2/d9316af4c15b4ca0362cb4498abe47be6e04f7119f3ccf697e38ee04d33b/safetensors-0.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8359bef65f49d51476e9811d59c015f0ddae618ee0e44144f5595278c9f8268c", size = 441039 }, + { url = "https://files.pythonhosted.org/packages/e8/ac/478e910c891feadb693316b31447f14929b7047a612df9b628589b89be3c/safetensors-0.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a32c662e7df9226fd850f054a3ead0e4213a96a70b5ce37b2d26ba27004e013", size = 439516 }, + { url = "https://files.pythonhosted.org/packages/81/43/f9929e854c4fcca98459f03de003d9619dd5f7d10d74e03df7af9907b119/safetensors-0.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c329a4dcc395364a1c0d2d1574d725fe81a840783dda64c31c5a60fc7d41472c", size = 477242 }, + { url = "https://files.pythonhosted.org/packages/0a/4d/b754f59fe395ea5bd8531c090c557e161fffed1753eeb3d87c0f8eaa62c4/safetensors-0.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:239ee093b1db877c9f8fe2d71331a97f3b9c7c0d3ab9f09c4851004a11f44b65", size = 494615 }, + { url = "https://files.pythonhosted.org/packages/54/7d/b26801dab2ecb499eb1ebdb46be65600b49bb062fe12b298150695a6e23c/safetensors-0.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd574145d930cf9405a64f9923600879a5ce51d9f315443a5f706374841327b6", size = 434933 }, + { url = "https://files.pythonhosted.org/packages/e2/40/0f6627ad98e21e620a6835f02729f6b701804d3c452f8773648cbd0b9c2c/safetensors-0.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f6784eed29f9e036acb0b7769d9e78a0dc2c72c2d8ba7903005350d817e287a4", size = 457646 }, + { url = "https://files.pythonhosted.org/packages/30/1e/7f7819d1be7c36fbedcb7099a461b79e0ed19631b3ca5595e0f81501bb2c/safetensors-0.4.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:65a4a6072436bf0a4825b1c295d248cc17e5f4651e60ee62427a5bcaa8622a7a", size = 619204 }, + { url = "https://files.pythonhosted.org/packages/b1/58/e91e8c9888303919ce56f038fcad4147431fd95630890799bf8c928d1d34/safetensors-0.4.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:df81e3407630de060ae8313da49509c3caa33b1a9415562284eaf3d0c7705f9f", size = 605400 }, + { url = "https://files.pythonhosted.org/packages/dd/fd/7a760367b62752e8c6d57c3759eaa57e5b47f55524bba3d803e03f922f95/safetensors-0.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1d1f34c71371f0e034004a0b583284b45d233dd0b5f64a9125e16b8a01d15067", size = 393406 }, + { url = "https://files.pythonhosted.org/packages/dd/21/628d56eeae4bd0dcb5b11a9ec4001a50d2f85b726b10a864f72f34ba486f/safetensors-0.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a8043a33d58bc9b30dfac90f75712134ca34733ec3d8267b1bd682afe7194f5", size = 383386 }, + { url = "https://files.pythonhosted.org/packages/19/27/699124b4c6c27b7860140bac7ee6c50bde104e55951f8f5163f9ad20faa9/safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db8f0c59c84792c12661f8efa85de160f80efe16b87a9d5de91b93f9e0bce3c", size = 442158 }, + { url = "https://files.pythonhosted.org/packages/23/01/85a621bdded944d6800f654c823a00df513263f1921a96d67d7fceb2ffb9/safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfc1fc38e37630dd12d519bdec9dcd4b345aec9930bb9ce0ed04461f49e58b52", size = 436170 }, + { url = "https://files.pythonhosted.org/packages/4f/a3/b15adfffc6c8faaae6416f5c70ee4c64e4986b630b4ada18a314228a15e2/safetensors-0.4.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c9d86d9b13b18aafa88303e2cd21e677f5da2a14c828d2c460fe513af2e9a5", size = 458196 }, + { url = 
"https://files.pythonhosted.org/packages/8c/c1/ca829972be495326b5a986fe15e2ef16ecc4c35959942555091938f457af/safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:43251d7f29a59120a26f5a0d9583b9e112999e500afabcfdcb91606d3c5c89e3", size = 620510 }, + { url = "https://files.pythonhosted.org/packages/e7/50/89e5eac4120b55422450d5221c86d526ace14e222ea3f6c0c005f8f011ec/safetensors-0.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2c42e9b277513b81cf507e6121c7b432b3235f980cac04f39f435b7902857f91", size = 606993 }, +] + +[[package]] +name = "scikit-learn" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "scipy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "threadpoolctl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/72/2961b9874a9ddf2b0f95f329d4e67f67c3301c1d88ba5e239ff25661bb85/scikit_learn-1.5.1.tar.gz", hash = "sha256:0ea5d40c0e3951df445721927448755d3fe1d80833b0b7308ebff5d2a45e6414", size = 6958368 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/59/d8ea8c05e61d2afa988dfcfe47526595b531e94d23babf58d2e00a35f646/scikit_learn-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:781586c414f8cc58e71da4f3d7af311e0505a683e112f2f62919e3019abd3745", size = 12102257 }, + { url = "https://files.pythonhosted.org/packages/1f/c6/ba8e5691acca616adc8f0d6f8f5e79d55b927530aa404ee712b077acf0cf/scikit_learn-1.5.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5b213bc29cc30a89a3130393b0e39c847a15d769d6e59539cd86b75d276b1a7", size = 10975310 }, + { url = 
"https://files.pythonhosted.org/packages/5c/c6/e362563cc7dfe37e4699cbf2b2d22c2854be227c254976de1c4854fc6e84/scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff4ba34c2abff5ec59c803ed1d97d61b036f659a17f55be102679e88f926fac", size = 12496508 }, + { url = "https://files.pythonhosted.org/packages/f2/60/6c589c91e474721efdcec82ea9cc5c743359e52637e46c364ee5236666ef/scikit_learn-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:161808750c267b77b4a9603cf9c93579c7a74ba8486b1336034c2f1579546d21", size = 13352348 }, + { url = "https://files.pythonhosted.org/packages/f1/13/de29b945fb28fc0c24159d3a83f1250c5232c1c9abac12434c7c3447e9cc/scikit_learn-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:10e49170691514a94bb2e03787aa921b82dbc507a4ea1f20fd95557862c98dc1", size = 10966250 }, + { url = "https://files.pythonhosted.org/packages/03/86/ab9f95e338c5ef5b4e79463ee91e55aae553213835e59bf038bc0cc21bf8/scikit_learn-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:154297ee43c0b83af12464adeab378dee2d0a700ccd03979e2b821e7dd7cc1c2", size = 12087598 }, + { url = "https://files.pythonhosted.org/packages/7d/d7/fb80c63062b60b1fa5dcb2d4dd3a4e83bd8c68cdc83cf6ff8c016228f184/scikit_learn-1.5.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b5e865e9bd59396220de49cb4a57b17016256637c61b4c5cc81aaf16bc123bbe", size = 10979067 }, + { url = "https://files.pythonhosted.org/packages/c1/f8/fd3fa610cac686952d8c78b8b44cf5263c6c03885bd8e5d5819c684b44e8/scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909144d50f367a513cee6090873ae582dba019cb3fca063b38054fa42704c3a4", size = 12485469 }, + { url = "https://files.pythonhosted.org/packages/32/63/ed228892adad313aab0d0f9261241e7bf1efe36730a2788ad424bcad00ca/scikit_learn-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689b6f74b2c880276e365fe84fe4f1befd6a774f016339c65655eaff12e10cbf", size = 
13335048 }, + { url = "https://files.pythonhosted.org/packages/5d/55/0403bf2031250ac982c8053397889fbc5a3a2b3798b913dae4f51c3af6a4/scikit_learn-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:9a07f90846313a7639af6a019d849ff72baadfa4c74c778821ae0fad07b7275b", size = 10988436 }, + { url = "https://files.pythonhosted.org/packages/b1/8d/cf392a56e24627093a467642c8b9263052372131359b570df29aaf4811ab/scikit_learn-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5944ce1faada31c55fb2ba20a5346b88e36811aab504ccafb9f0339e9f780395", size = 12102404 }, + { url = "https://files.pythonhosted.org/packages/d5/2c/734fc9269bdb6768905ac41b82d75264b26925b1e462f4ebf45fe4f17646/scikit_learn-1.5.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0828673c5b520e879f2af6a9e99eee0eefea69a2188be1ca68a6121b809055c1", size = 11037398 }, + { url = "https://files.pythonhosted.org/packages/d3/a9/15774b178bcd1cde1c470adbdb554e1504dce7c302e02ff736c90d65e014/scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508907e5f81390e16d754e8815f7497e52139162fd69c4fdbd2dfa5d6cc88915", size = 12089887 }, + { url = "https://files.pythonhosted.org/packages/8a/5d/047cde25131eef3a38d03317fa7d25d6f60ce6e8ccfd24ac88b3e309fc00/scikit_learn-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97625f217c5c0c5d0505fa2af28ae424bd37949bb2f16ace3ff5f2f81fb4498b", size = 13079093 }, + { url = "https://files.pythonhosted.org/packages/cb/be/dec2a8d31d133034a8ec51ae68ac564ec9bde1c78a64551f1438c3690b9e/scikit_learn-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:da3f404e9e284d2b0a157e1b56b6566a34eb2798205cba35a211df3296ab7a74", size = 10945350 }, +] + +[[package]] +name = "scipy" +version = "1.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/62/11/4d44a1f274e002784e4dbdb81e0ea96d2de2d1045b2132d5af62cc31fd28/scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417", size = 58620554 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/68/3bc0cfaf64ff507d82b1e5d5b64521df4c8bf7e22bc0b897827cbee9872c/scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389", size = 39069598 }, + { url = "https://files.pythonhosted.org/packages/43/a5/8d02f9c372790326ad405d94f04d4339482ec082455b9e6e288f7100513b/scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3", size = 29879676 }, + { url = "https://files.pythonhosted.org/packages/07/42/0e0bea9666fcbf2cb6ea0205db42c81b1f34d7b729ba251010edf9c80ebd/scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0", size = 23088696 }, + { url = "https://files.pythonhosted.org/packages/15/47/298ab6fef5ebf31b426560e978b8b8548421d4ed0bf99263e1eb44532306/scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3", size = 25470699 }, + { url = "https://files.pythonhosted.org/packages/d8/df/cdb6be5274bc694c4c22862ac3438cb04f360ed9df0aecee02ce0b798380/scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d", size = 35606631 }, + { url = "https://files.pythonhosted.org/packages/47/78/b0c2c23880dd1e99e938ad49ccfb011ae353758a2dc5ed7ee59baff684c3/scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69", size = 41178528 }, + { url = 
"https://files.pythonhosted.org/packages/5d/aa/994b45c34b897637b853ec04334afa55a85650a0d11dacfa67232260fb0a/scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad", size = 42784535 }, + { url = "https://files.pythonhosted.org/packages/e7/1c/8daa6df17a945cb1a2a1e3bae3c49643f7b3b94017ff01a4787064f03f84/scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5", size = 44772117 }, + { url = "https://files.pythonhosted.org/packages/b2/ab/070ccfabe870d9f105b04aee1e2860520460ef7ca0213172abfe871463b9/scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675", size = 39076999 }, + { url = "https://files.pythonhosted.org/packages/a7/c5/02ac82f9bb8f70818099df7e86c3ad28dae64e1347b421d8e3adf26acab6/scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2", size = 29894570 }, + { url = "https://files.pythonhosted.org/packages/ed/05/7f03e680cc5249c4f96c9e4e845acde08eb1aee5bc216eff8a089baa4ddb/scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617", size = 23103567 }, + { url = "https://files.pythonhosted.org/packages/5e/fc/9f1413bef53171f379d786aabc104d4abeea48ee84c553a3e3d8c9f96a9c/scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8", size = 25499102 }, + { url = "https://files.pythonhosted.org/packages/c2/4b/b44bee3c2ddc316b0159b3d87a3d467ef8d7edfd525e6f7364a62cd87d90/scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37", size = 35586346 }, + { url = 
"https://files.pythonhosted.org/packages/93/6b/701776d4bd6bdd9b629c387b5140f006185bd8ddea16788a44434376b98f/scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2", size = 41165244 }, + { url = "https://files.pythonhosted.org/packages/06/57/e6aa6f55729a8f245d8a6984f2855696c5992113a5dc789065020f8be753/scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2", size = 42817917 }, + { url = "https://files.pythonhosted.org/packages/ea/c2/5ecadc5fcccefaece775feadcd795060adf5c3b29a883bff0e678cfe89af/scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94", size = 44781033 }, + { url = "https://files.pythonhosted.org/packages/c0/04/2bdacc8ac6387b15db6faa40295f8bd25eccf33f1f13e68a72dc3c60a99e/scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d", size = 39128781 }, + { url = "https://files.pythonhosted.org/packages/c8/53/35b4d41f5fd42f5781dbd0dd6c05d35ba8aa75c84ecddc7d44756cd8da2e/scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07", size = 29939542 }, + { url = "https://files.pythonhosted.org/packages/66/67/6ef192e0e4d77b20cc33a01e743b00bc9e68fb83b88e06e636d2619a8767/scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5", size = 23148375 }, + { url = "https://files.pythonhosted.org/packages/f6/32/3a6dedd51d68eb7b8e7dc7947d5d841bcb699f1bf4463639554986f4d782/scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc", size = 25578573 }, + { url = 
"https://files.pythonhosted.org/packages/f0/5a/efa92a58dc3a2898705f1dc9dbaf390ca7d4fba26d6ab8cfffb0c72f656f/scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310", size = 35319299 }, + { url = "https://files.pythonhosted.org/packages/8e/ee/8a26858ca517e9c64f84b4c7734b89bda8e63bec85c3d2f432d225bb1886/scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066", size = 40849331 }, + { url = "https://files.pythonhosted.org/packages/a5/cd/06f72bc9187840f1c99e1a8750aad4216fc7dfdd7df46e6280add14b4822/scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1", size = 42544049 }, + { url = "https://files.pythonhosted.org/packages/aa/7d/43ab67228ef98c6b5dd42ab386eae2d7877036970a0d7e3dd3eb47a0d530/scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f", size = 44521212 }, + { url = "https://files.pythonhosted.org/packages/50/ef/ac98346db016ff18a6ad7626a35808f37074d25796fd0234c2bb0ed1e054/scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79", size = 39091068 }, + { url = "https://files.pythonhosted.org/packages/b9/cc/70948fe9f393b911b4251e96b55bbdeaa8cca41f37c26fd1df0232933b9e/scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e", size = 29875417 }, + { url = "https://files.pythonhosted.org/packages/3b/2e/35f549b7d231c1c9f9639f9ef49b815d816bf54dd050da5da1c11517a218/scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73", size = 23084508 }, + { url = 
"https://files.pythonhosted.org/packages/3f/d6/b028e3f3e59fae61fb8c0f450db732c43dd1d836223a589a8be9f6377203/scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e", size = 25503364 }, + { url = "https://files.pythonhosted.org/packages/a7/2f/6c142b352ac15967744d62b165537a965e95d557085db4beab2a11f7943b/scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d", size = 35292639 }, + { url = "https://files.pythonhosted.org/packages/56/46/2449e6e51e0d7c3575f289f6acb7f828938eaab8874dbccfeb0cd2b71a27/scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e", size = 40798288 }, + { url = "https://files.pythonhosted.org/packages/32/cd/9d86f7ed7f4497c9fd3e39f8918dd93d9f647ba80d7e34e4946c0c2d1a7c/scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06", size = 42524647 }, + { url = "https://files.pythonhosted.org/packages/f5/1b/6ee032251bf4cdb0cc50059374e86a9f076308c1512b61c4e003e241efb7/scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84", size = 44469524 }, +] + +[[package]] +name = "semantic-kernel" +version = "1.11.0" +source = { editable = "." 
} +dependencies = [ + { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "azure-identity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "defusedxml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "openapi-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-api", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "opentelemetry-sdk", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "prance", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pybars4", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic-settings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[package.optional-dependencies] +anthropic = [ + { name = "anthropic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +azure = [ + { name = "azure-ai-inference", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"azure-cosmos", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "azure-identity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "azure-search-documents", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +chroma = [ + { name = "chromadb", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +google = [ + { name = "google-cloud-aiplatform", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "google-generativeai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +hugging-face = [ + { name = "sentence-transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "transformers", extra = ["torch"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +milvus = [ + { name = "milvus", marker = "(platform_system != 'Windows' and sys_platform == 'darwin') or (platform_system != 'Windows' and sys_platform == 'linux') or (platform_system != 'Windows' and sys_platform == 'win32')" }, + { name = "pymilvus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +mistralai = [ + { name = "mistralai", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +mongo = [ + { name = "motor", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +notebooks = [ + { name = "ipykernel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +ollama = [ + { name = "ollama", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, +] +pandas = [ + { name = "pandas", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +pinecone = [ + { name = "pinecone-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +postgres = [ + { name = "psycopg", extra = ["binary", "pool"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +qdrant = [ + { name = "qdrant-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +redis = [ + { name = "redis", extra = ["hiredis"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "types-redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +usearch = [ + { name = "pyarrow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "usearch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +weaviate = [ + { name = "weaviate-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[package.dev-dependencies] +dev = [ + { name = "ipykernel", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "mypy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nbconvert", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pre-commit", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform 
== 'win32'" }, + { name = "pytest-cov", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pytest-xdist", extra = ["psutil"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "ruff", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "snoop", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "types-pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", specifier = "~=3.8" }, + { name = "anthropic", marker = "extra == 'anthropic'", specifier = "~=0.32" }, + { name = "azure-ai-inference", marker = "extra == 'azure'", specifier = ">=1.0.0b3" }, + { name = "azure-cosmos", marker = "extra == 'azure'", specifier = "~=4.7" }, + { name = "azure-identity", specifier = "~=1.13" }, + { name = "azure-identity", marker = "extra == 'azure'", specifier = "~=1.13" }, + { name = "azure-search-documents", marker = "extra == 'azure'", specifier = ">=11.6.0b4" }, + { name = "chromadb", marker = "extra == 'chroma'", specifier = ">=0.4,<0.6" }, + { name = "defusedxml", specifier = "~=0.7" }, + { name = "google-cloud-aiplatform", marker = "extra == 'google'", specifier = "~=1.60" }, + { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.7" }, + { name = "ipykernel", marker = "extra == 'notebooks'", specifier = "~=6.29" }, + { name = "jinja2", specifier = "~=3.1" }, + { name = "milvus", marker = "platform_system != 'Windows' and extra == 'milvus'", specifier = ">=2.3,<2.3.8" }, + { name = "mistralai", marker = "extra == 'mistralai'", specifier = ">=0.4,<2.0" }, + { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.7.0" }, + { name = "nest-asyncio", specifier = "~=1.6" }, + { name = "numpy", marker = "python_full_version < 
'3.12'", specifier = ">=1.25.0" }, + { name = "numpy", marker = "python_full_version >= '3.12'", specifier = ">=1.26.0" }, + { name = "ollama", marker = "extra == 'ollama'", specifier = "~=0.2" }, + { name = "openai", specifier = "~=1.0" }, + { name = "openapi-core", specifier = ">=0.18,<0.20" }, + { name = "opentelemetry-api", specifier = "~=1.24" }, + { name = "opentelemetry-sdk", specifier = "~=1.24" }, + { name = "pandas", marker = "extra == 'pandas'", specifier = "~=2.2" }, + { name = "pinecone-client", marker = "extra == 'pinecone'", specifier = "~=5.0" }, + { name = "prance", specifier = "~=23.6.21.0" }, + { name = "psycopg", extras = ["binary", "pool"], marker = "extra == 'postgres'", specifier = "~=3.2" }, + { name = "pyarrow", marker = "extra == 'usearch'", specifier = ">=12.0,<18.0" }, + { name = "pybars4", specifier = "~=0.9" }, + { name = "pydantic", specifier = "~=2.0" }, + { name = "pydantic-settings", specifier = "~=2.0" }, + { name = "pymilvus", marker = "extra == 'milvus'", specifier = ">=2.3,<2.5" }, + { name = "qdrant-client", marker = "extra == 'qdrant'", specifier = "~=1.9" }, + { name = "redis", extras = ["hiredis"], marker = "extra == 'redis'", specifier = "~=5.0" }, + { name = "sentence-transformers", marker = "extra == 'hugging-face'", specifier = ">=2.2,<4.0" }, + { name = "torch", marker = "extra == 'hugging-face'", specifier = "==2.4.1" }, + { name = "transformers", extras = ["torch"], marker = "extra == 'hugging-face'", specifier = "~=4.28" }, + { name = "types-redis", marker = "extra == 'redis'", specifier = "~=4.6.0.20240425" }, + { name = "usearch", marker = "extra == 'usearch'", specifier = "~=2.9" }, + { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=3.18,<5.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "ipykernel", specifier = "~=6.29" }, + { name = "mypy", specifier = ">=1.10" }, + { name = "nbconvert", specifier = "~=7.16" }, + { name = "pre-commit", specifier = "~=3.7" }, + { name = 
"pytest", specifier = "~=8.2" }, + { name = "pytest-asyncio", specifier = "~=0.23" }, + { name = "pytest-cov", specifier = ">=5.0" }, + { name = "pytest-xdist", extras = ["psutil"], specifier = "~=3.6" }, + { name = "ruff", specifier = "~=0.5" }, + { name = "snoop", specifier = "~=0.4" }, + { name = "types-pyyaml", specifier = "~=6.0.12.20240311" }, +] + +[[package]] +name = "sentence-transformers" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pillow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "scikit-learn", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "scipy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/65/8d/8d6147fdef0ed7aeff3dab487bd17619b512afab845eb295faa08b20a5d0/sentence_transformers-2.7.0.tar.gz", hash = "sha256:2f7df99d1c021dded471ed2d079e9d1e4fc8e30ecb06f957be060511b36f24ea", size = 128393 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/2c/bd95032aeb087b0706596af0a4518c4bfe0439a1bb149048ece18b617766/sentence_transformers-2.7.0-py3-none-any.whl", hash = "sha256:6a7276b05a95931581bbfa4ba49d780b2cf6904fa4a171ec7fd66c343f761c98", size = 171480 }, +] + +[[package]] +name = "setuptools" 
+version = "73.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/37/f4d4ce9bc15e61edba3179f9b0f763fc6d439474d28511b11f0d95bab7a2/setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193", size = 2526506 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6a/0270e295bf30c37567736b7fca10167640898214ff911273af37ddb95770/setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e", size = 2346588 }, +] + +[[package]] +name = "shapely" +version = "2.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/89/0d20bac88016be35ff7d3c0c2ae64b477908f1b1dfa540c5d69ac7af07fe/shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6", size = 282361 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d4/f84bbbdb7771f5b9ade94db2398b256cf1471f1eb0ca8afbe0f6ca725d5a/shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b", size = 1449635 }, + { url = "https://files.pythonhosted.org/packages/03/10/bd6edb66ed0a845f0809f7ce653596f6fd9c6be675b3653872f47bf49f82/shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b", size = 1296756 }, + { url = "https://files.pythonhosted.org/packages/af/09/6374c11cb493a9970e8c04d7be25f578a37f6494a2fecfbed3a447b16b2c/shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde", size = 2381960 }, + { url = 
"https://files.pythonhosted.org/packages/2b/a6/302e0d9c210ccf4d1ffadf7ab941797d3255dcd5f93daa73aaf116a4db39/shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e", size = 2468133 }, + { url = "https://files.pythonhosted.org/packages/8c/be/e448681dc485f2931d4adee93d531fce93608a3ee59433303cc1a46e21a5/shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e", size = 1294982 }, + { url = "https://files.pythonhosted.org/packages/cd/4c/6f4a6fc085e3be01c4c9de0117a2d373bf9fec5f0426cf4d5c94090a5a4d/shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4", size = 1441141 }, + { url = "https://files.pythonhosted.org/packages/37/15/269d8e1f7f658a37e61f7028683c546f520e4e7cedba1e32c77ff9d3a3c7/shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e", size = 1449578 }, + { url = "https://files.pythonhosted.org/packages/37/63/e182e43081fffa0a2d970c480f2ef91647a6ab94098f61748c23c2a485f2/shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2", size = 1296792 }, + { url = "https://files.pythonhosted.org/packages/6e/5a/d019f69449329dcd517355444fdb9ddd58bec5e080b8bdba007e8e4c546d/shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855", size = 2443997 }, + { url = "https://files.pythonhosted.org/packages/25/aa/53f145e5a610a49af9ac49f2f1be1ec8659ebd5c393d66ac94e57c83b00e/shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0", size = 2528334 }, + { url = 
"https://files.pythonhosted.org/packages/64/64/0c7b0a22b416d36f6296b92bb4219d82b53d0a7c47e16fd0a4c85f2f117c/shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d", size = 1294669 }, + { url = "https://files.pythonhosted.org/packages/b1/5a/6a67d929c467a1973b6bb9f0b00159cc343b02bf9a8d26db1abd2f87aa23/shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b", size = 1442032 }, + { url = "https://files.pythonhosted.org/packages/46/77/efd9f9d4b6a762f976f8b082f54c9be16f63050389500fb52e4f6cc07c1a/shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0", size = 1450326 }, + { url = "https://files.pythonhosted.org/packages/68/53/5efa6e7a4036a94fe6276cf7bbb298afded51ca3396b03981ad680c8cc7d/shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3", size = 1298480 }, + { url = "https://files.pythonhosted.org/packages/88/a2/1be1db4fc262e536465a52d4f19d85834724fedf2299a1b9836bc82fe8fa/shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8", size = 2439311 }, + { url = "https://files.pythonhosted.org/packages/d5/7d/9a57e187cbf2fbbbdfd4044a4f9ce141c8d221f9963750d3b001f0ec080d/shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726", size = 2524835 }, + { url = "https://files.pythonhosted.org/packages/6d/0a/f407509ab56825f39bf8cfce1fb410238da96cf096809c3e404e5bc71ea1/shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f", size = 1295613 }, + { url = 
"https://files.pythonhosted.org/packages/7b/b3/857afd9dfbfc554f10d683ac412eac6fa260d1f4cd2967ecb655c57e831a/shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48", size = 1442539 }, + { url = "https://files.pythonhosted.org/packages/34/e8/d164ef5b0eab86088cde06dee8415519ffd5bb0dd1bd9d021e640e64237c/shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013", size = 1445344 }, + { url = "https://files.pythonhosted.org/packages/ce/e2/9fba7ac142f7831757a10852bfa465683724eadbc93d2d46f74a16f9af04/shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7", size = 1296182 }, + { url = "https://files.pythonhosted.org/packages/cf/dc/790d4bda27d196cd56ec66975eaae3351c65614cafd0e16ddde39ec9fb92/shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381", size = 2423426 }, + { url = "https://files.pythonhosted.org/packages/af/b0/f8169f77eac7392d41e231911e0095eb1148b4d40c50ea9e34d999c89a7e/shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805", size = 2513249 }, + { url = "https://files.pythonhosted.org/packages/f6/1d/a8c0e9ab49ff2f8e4dedd71b0122eafb22a18ad7e9d256025e1f10c84704/shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a", size = 1294848 }, + { url = "https://files.pythonhosted.org/packages/23/38/2bc32dd1e7e67a471d4c60971e66df0bdace88656c47a9a728ace0091075/shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2", size = 1441371 }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" 
} +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, +] + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "snoop" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = 
"cheap-repr", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "executing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "six", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/c1/c93715f44b16ad7ec52a7b48ae26bdc1880c0192d6075ba3a097e7b04f3e/snoop-0.4.3.tar.gz", hash = "sha256:2e0930bb19ff0dbdaa6f5933f88e89ed5984210ea9f9de0e1d8231fa5c1c1f25", size = 139747 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/b4/5eb395a7c44f382f42cc4ce2d544223c0506e06c61534f45a2188b8fdf13/snoop-0.4.3-py2.py3-none-any.whl", hash = "sha256:b7418581889ff78b29d9dc5ad4625c4c475c74755fb5cba82c693c6e32afadc0", size = 27841 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "executing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pure-eval", marker = "sys_platform == 'darwin' or 
sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, +] + +[[package]] +name = "starlette" +version = "0.38.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/e2/d49a94ecb665b3a1c34b40c78165a737abc384fcabc843ccb14a3bd3dc37/starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75", size = 2844770 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/d976da9998e4f4a99e297cda09d61ce305919ea94cbeeb476dba4fece098/starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff", size = 72020 }, +] + +[[package]] +name = "sympy" +version = "1.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/15/4a041424c7187f41cce678f5a02189b244e9aac61a18b45cd415a3a470f3/sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13", size = 7532926 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl", hash = 
"sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9", size = 6189289 }, +] + +[[package]] +name = "tenacity" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, +] + +[[package]] +name = "threadpoolctl" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/55/b5148dcbf72f5cde221f8bfe3b6a540da7aa1842f6b491ad979a6c8b84af/threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107", size = 41936 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/2c/ffbf7a134b9ab11a67b0cf0726453cedd9c5043a4fe7a35d1cefa9a1bcfb/threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467", size = 18414 }, +] + +[[package]] +name = "tinycss2" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/44/6f/38d2335a2b70b9982d112bb177e3dbe169746423e33f718bf5e9c7b3ddd3/tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d", size = 67360 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/4d/0db5b8a613d2a59bbc29bc5bb44a2f8070eb9ceab11c50d477502a8a0092/tinycss2-1.3.0-py3-none-any.whl", 
hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7", size = 22532 }, +] + +[[package]] +name = "tokenizers" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/04/2071c150f374aab6d5e92aaec38d0f3c368d227dd9e0469a1f0966ac68d1/tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3", size = 321039 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/91cac8d496b304ec5a22f07606893cad35ea8e1a8406dc8909e365f97a80/tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97", size = 2533301 }, + { url = "https://files.pythonhosted.org/packages/4c/12/9cb68762ff5fee1efd51aefe2f62cb225f26f060a68a3779e1060bbc7a59/tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77", size = 2440223 }, + { url = "https://files.pythonhosted.org/packages/e4/03/b2020e6a78fb994cff1ec962adc157c23109172a46b4fe451d6d0dd33fdb/tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4", size = 3683779 }, + { url = "https://files.pythonhosted.org/packages/50/4e/2e5549a26dc6f9e434f83bebf16c2d7dc9dc3477cc0ec8b23ede4d465b90/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642", size = 3569431 }, + { url = "https://files.pythonhosted.org/packages/75/79/158626bd794e75551e0c6bb93f1cd3c9ba08ba14b181b98f09e95994f609/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46", size = 3424739 }, + { url = "https://files.pythonhosted.org/packages/65/8e/5f4316976c26009f1ae0b6543f3d97af29afa5ba5dc145251e6a07314618/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1", size = 3965791 }, + { url = "https://files.pythonhosted.org/packages/6a/e1/5dbac9618709972434eea072670cd69fba1aa988e6200f16057722b4bf96/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe", size = 4049879 }, + { url = "https://files.pythonhosted.org/packages/40/4f/eb78de4af3b17b589f43a369cbf0c3a7173f25c3d2cd93068852c07689aa/tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e", size = 3607049 }, + { url = "https://files.pythonhosted.org/packages/f5/f8/141dcb0f88e9452af8d20d14dd53aab5937222a2bb4f2c04bfed6829263c/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98", size = 9634084 }, + { url = "https://files.pythonhosted.org/packages/2e/be/debb7caa3f88ed54015170db16e07aa3a5fea2d3983d0dde92f98d888dc8/tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3", size = 9949480 }, + { url = "https://files.pythonhosted.org/packages/7a/e7/26bedf5d270d293d572a90bd66b0b030012aedb95d8ee87e8bcd446b76fb/tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837", size = 2041462 }, + { url = "https://files.pythonhosted.org/packages/f4/85/d999b9a05fd101d48f1a365d68be0b109277bb25c89fb37a389d669f9185/tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = 
"sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403", size = 2220036 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/6e1d728d765eb4102767f071bf7f6439ab10d7f4a975c9217db65715207a/tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059", size = 2533448 }, + { url = "https://files.pythonhosted.org/packages/90/79/d17a0f491d10817cd30f1121a07aa09c8e97a81114b116e473baf1577f09/tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14", size = 2440254 }, + { url = "https://files.pythonhosted.org/packages/c7/28/2d11c3ff94f9d42eceb2ea549a06e3f166fe391c5a025e5d96fac898a3ac/tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594", size = 3684971 }, + { url = "https://files.pythonhosted.org/packages/36/c6/537f22b57e6003904d35d07962dbde2f2e9bdd791d0241da976a4c7f8194/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc", size = 3568894 }, + { url = "https://files.pythonhosted.org/packages/af/ef/3c1deed14ec59b2c8e7e2fa27b2a53f7d101181277a43b89ab17d891ef2e/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2", size = 3426873 }, + { url = "https://files.pythonhosted.org/packages/06/db/c0320c4798ac6bd12d2ef895bec9d10d216a3b4d6fff10e9d68883ea7edc/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe", size = 3965050 }, + { url = 
"https://files.pythonhosted.org/packages/4c/8a/a166888d6cb14db55f5eb7ce0b1d4777d145aa27cbf4f945712cf6c29935/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d", size = 4047855 }, + { url = "https://files.pythonhosted.org/packages/a7/03/fb50fc03f86016b227a967c8d474f90230c885c0d18f78acdfda7a96ce56/tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa", size = 3608228 }, + { url = "https://files.pythonhosted.org/packages/5b/cd/0385e1026e1e03732fd398e964792a3a8433918b166748c82507e014d748/tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6", size = 9633115 }, + { url = "https://files.pythonhosted.org/packages/25/50/8f8ad0bbdaf09d04b15e6502d1fa1c653754ed7e016e4ae009726aa1a4e4/tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b", size = 9949062 }, + { url = "https://files.pythonhosted.org/packages/db/11/31be66710f1d14526f3588a441efadeb184e1e68458067007b20ead03c59/tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256", size = 2041039 }, + { url = "https://files.pythonhosted.org/packages/65/8e/6d7d72b28f22c422cff8beae10ac3c2e4376b9be721ef8167b7eecd1da62/tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66", size = 2220386 }, + { url = "https://files.pythonhosted.org/packages/63/90/2890cd096898dcdb596ee172cde40c0f54a9cf43b0736aa260a5501252af/tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153", size = 2530580 }, + { url = 
"https://files.pythonhosted.org/packages/74/d1/f4e1e950adb36675dfd8f9d0f4be644f3f3aaf22a5677a4f5c81282b662e/tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a", size = 2436682 }, + { url = "https://files.pythonhosted.org/packages/ed/30/89b321a16c58d233e301ec15072c0d3ed5014825e72da98604cd3ab2fba1/tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95", size = 3693494 }, + { url = "https://files.pythonhosted.org/packages/05/40/fa899f32de483500fbc78befd378fd7afba4270f17db707d1a78c0a4ddc3/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266", size = 3566541 }, + { url = "https://files.pythonhosted.org/packages/67/14/e7da32ae5fb4971830f1ef335932fae3fa57e76b537e852f146c850aefdf/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52", size = 3430792 }, + { url = "https://files.pythonhosted.org/packages/f2/4b/aae61bdb6ab584d2612170801703982ee0e35f8b6adacbeefe5a3b277621/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f", size = 3962812 }, + { url = "https://files.pythonhosted.org/packages/0a/b6/f7b7ef89c4da7b20256e6eab23d3835f05d1ca8f451d31c16cbfe3cd9eb6/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840", size = 4024688 }, + { url = "https://files.pythonhosted.org/packages/80/54/12047a69f5b382d7ee72044dc89151a2dd0d13b2c9bdcc22654883704d31/tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3", size = 3610961 }, + { url = "https://files.pythonhosted.org/packages/52/b7/1e8a913d18ac28feeda42d4d2d51781874398fb59cd1c1e2653a4b5742ed/tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea", size = 9631367 }, + { url = "https://files.pythonhosted.org/packages/ac/3d/2284f6d99f8f21d09352b88b8cfefa24ab88468d962aeb0aa15c20d76b32/tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c", size = 9950121 }, + { url = "https://files.pythonhosted.org/packages/2a/94/ec3369dbc9b7200c14c8c7a1a04c78b7a7398d0c001e1b7d1ffe30eb93a0/tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57", size = 2044069 }, + { url = "https://files.pythonhosted.org/packages/0c/97/80bff6937e0c67d30c0facacd4f0bcf4254e581aa4995c73cef8c8640e56/tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a", size = 2214527 }, + { url = "https://files.pythonhosted.org/packages/cf/7b/38fb7207cde3d1dc5272411cd18178e6437cdc1ef08cac5d0e8cfd57f38c/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334", size = 2532668 }, + { url = "https://files.pythonhosted.org/packages/1d/0d/2c452fe17fc17f0cdb713acb811eebb1f714b8c21d497c4672af4f491229/tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd", size = 2438321 }, + { url = "https://files.pythonhosted.org/packages/19/e0/f9e915d028b45798723eab59c253da28040aa66b9f31dcb7cfc3be88fa37/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594", size = 3682304 }, + { url = "https://files.pythonhosted.org/packages/ce/2b/db8a94608c392752681c2ca312487b7cd5bcc4f77e24a90daa4916138271/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda", size = 3566208 }, + { url = "https://files.pythonhosted.org/packages/d8/58/2e998462677c4c0eb5123ce386bcb488a155664d273d0283122866515f09/tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022", size = 3605791 }, + { url = "https://files.pythonhosted.org/packages/83/ac/26bc2e2bb2a054dc2e51699628936f5474e093b68da6ccdde04b2fc39ab8/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e", size = 9632867 }, + { url = "https://files.pythonhosted.org/packages/45/b6/36c1bb106bbe96012c9367df89ed01599cada036c0b96d38fbbdbeb75c9f/tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75", size = 9945103 }, +] + +[[package]] +name = "tomli" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, +] + +[[package]] +name = "torch" +version = "2.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "fsspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "networkx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-cublas-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cuda-cupti-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cuda-runtime-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cudnn-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or 
(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cufft-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-curand-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cusolver-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cusparse-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-nccl-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-nvtx-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 
'Linux' and sys_platform == 'win32')" }, + { name = "sympy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "triton", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/05/d540049b1832d1062510efc6829634b7fbef5394c757d8312414fb65a3cb/torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971", size = 797072810 }, + { url = "https://files.pythonhosted.org/packages/a0/12/2162df9c47386ae7cedbc938f9703fee4792d93504fab8608d541e71ece3/torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3", size = 89699259 }, + { url = "https://files.pythonhosted.org/packages/5d/4c/b2a59ff0e265f5ee154f0d81e948b1518b94f545357731e1a3245ee5d45b/torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada", size = 199433813 }, + { url = "https://files.pythonhosted.org/packages/dc/fb/1333ba666bbd53846638dd75a7a1d4eaf964aff1c482fc046e2311a1b499/torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd", size = 62139309 }, + { url = "https://files.pythonhosted.org/packages/ea/ea/4ab009e953bca6ff35ad75b8ab58c0923308636c182c145dc63084f7d136/torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113", size = 797111232 }, + { url = 
"https://files.pythonhosted.org/packages/8f/a1/b31f94b4631c1731261db9fdc9a749ef58facc3b76094a6fe974f611f239/torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8", size = 89719574 }, + { url = "https://files.pythonhosted.org/packages/5a/6a/775b93d6888c31f1f1fc457e4f5cc89f0984412d5dcdef792b8f2aa6e812/torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c", size = 199436128 }, + { url = "https://files.pythonhosted.org/packages/1f/34/c93873c37f93154d982172755f7e504fdbae6c760499303a3111ce6ce327/torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea", size = 62145176 }, + { url = "https://files.pythonhosted.org/packages/cc/df/5204a13a7a973c23c7ade615bafb1a3112b5d0ec258d8390f078fa4ab0f7/torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042", size = 797019590 }, + { url = "https://files.pythonhosted.org/packages/4f/16/d23a689e5ef8001ed2ace1a3a59f2fda842889b0c3f3877799089925282a/torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d", size = 89613802 }, + { url = "https://files.pythonhosted.org/packages/a8/e0/ca8354dfb8d834a76da51b06e8248b70fc182bc163540507919124974bdf/torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c", size = 199387694 }, + { url = "https://files.pythonhosted.org/packages/ac/30/8b6f77ea4ce84f015ee024b8dfef0dac289396254e8bfd493906d4cbb848/torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d", size = 62123443 }, +] + +[[package]] +name = "tornado" +version = "6.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/66/398ac7167f1c7835406888a386f6d0d26ee5dbf197d8a571300be57662d3/tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9", size = 500623 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/d9/c33be3c1a7564f7d42d87a8d186371a75fd142097076767a5c27da941fef/tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8", size = 435924 }, + { url = "https://files.pythonhosted.org/packages/2e/0f/721e113a2fac2f1d7d124b3279a1da4c77622e104084f56119875019ffab/tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14", size = 433883 }, + { url = "https://files.pythonhosted.org/packages/13/cf/786b8f1e6fe1c7c675e79657448178ad65e41c1c9765ef82e7f6f765c4c5/tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4", size = 437224 }, + { url = "https://files.pythonhosted.org/packages/e4/8e/a6ce4b8d5935558828b0f30f3afcb2d980566718837b3365d98e34f6067e/tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842", size = 436597 }, + { url = "https://files.pythonhosted.org/packages/22/d4/54f9d12668b58336bd30defe0307e6c61589a3e687b05c366f804b7faaf0/tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3", size = 436797 }, + { url = "https://files.pythonhosted.org/packages/cf/3f/2c792e7afa7dd8b24fad7a2ed3c2f24a5ec5110c7b43a64cb6095cc106b8/tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f", size = 437516 }, + { url = 
"https://files.pythonhosted.org/packages/71/63/c8fc62745e669ac9009044b889fc531b6f88ac0f5f183cac79eaa950bb23/tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4", size = 436958 }, + { url = "https://files.pythonhosted.org/packages/94/d4/f8ac1f5bd22c15fad3b527e025ce219bd526acdbd903f52053df2baecc8b/tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698", size = 436882 }, + { url = "https://files.pythonhosted.org/packages/4b/3e/a8124c21cc0bbf144d7903d2a0cadab15cadaf683fa39a0f92bc567f0d4d/tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d", size = 438092 }, + { url = "https://files.pythonhosted.org/packages/d9/2f/3f2f05e84a7aff787a96d5fb06821323feb370fe0baed4db6ea7b1088f32/tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7", size = 438532 }, +] + +[[package]] +name = "tqdm" +version = "4.66.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or (platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/83/6ba9844a41128c62e810fddddd72473201f3eacde02046066142a2d96cc5/tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad", size = 169504 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/5d/acf5905c36149bbaec41ccf7f2b68814647347b72075ac0b1fe3022fdc73/tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd", size = 78351 }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, +] + +[[package]] +name = "transformers" +version = "4.44.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "safetensors", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/46/62e914365ab463addb0357a88f8d2614aae02f1a2b2b5c24c7ee005ff157/transformers-4.44.1.tar.gz", hash = 
"sha256:3b9a1a07ca65c665c7bf6109b7da76182184d10bb58d9ab14e6892e7b9e073a2", size = 8110315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/ab/c42556ba7c5aed687256466d472abb9a1b9cbff5730aa42a884d892e061a/transformers-4.44.1-py3-none-any.whl", hash = "sha256:bd2642da18b4e6d29b135c17650cd7ca8e874f2d092d2eddd3ed6b71a93a155c", size = 9465379 }, +] + +[package.optional-dependencies] +torch = [ + { name = "accelerate", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torch", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "triton" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/27/14cc3101409b9b4b9241d2ba7deaa93535a217a211c86c4cc7151fb12181/triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a", size = 209376304 }, + { url = "https://files.pythonhosted.org/packages/33/3e/a2f59384587eff6aeb7d37b6780de7fedd2214935e27520430ca9f5b7975/triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c", size = 209438883 }, + { url = "https://files.pythonhosted.org/packages/fe/7b/7757205dee3628f75e7991021d15cd1bd0c9b044ca9affe99b50879fc0e1/triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb", size = 209464695 }, +] + +[[package]] +name = "typer" +version = "0.12.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or 
sys_platform == 'win32'" }, + { name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "shellingham", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d4/f7/f174a1cae84848ae8b27170a96187b91937b743f0580ff968078fe16930a/typer-0.12.4.tar.gz", hash = "sha256:c9c1613ed6a166162705b3347b8d10b661ccc5d95692654d0fb628118f2c34e6", size = 97945 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/cc/15083dcde1252a663398b1b2a173637a3ec65adadfb95137dc95df1e6adc/typer-0.12.4-py3-none-any.whl", hash = "sha256:819aa03699f438397e876aa12b0d63766864ecba1b579092cc9fe35d886e34b6", size = 47402 }, +] + +[[package]] +name = "types-cffi" +version = "1.16.0.20240331" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/c8/81e5699160b91f0f91eea852d84035c412bfb4b3a29389701044400ab379/types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee", size = 11318 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/7a/98f5d2493a652cec05d3b09be59202d202004a41fca9c70d224782611365/types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0", size = 14550 }, +] + +[[package]] +name = "types-pyopenssl" +version = "24.1.0.20240722" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "types-cffi", marker = "sys_platform == 
'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499 }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240808" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/08/6f5737f645571b7a0b1ebd2fe8b5cf1ee4ec3e707866ca96042a86fc1d10/types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af", size = 12359 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/ad/ffbad24e2bc8f20bf047ec22af0c0a92f6ce2071eb21c9103df600cda6de/types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35", size = 15298 }, +] + +[[package]] +name = "types-redis" +version = "4.6.0.20240819" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "types-pyopenssl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/2b/a09204d0901d9d319b38f26434c5544f400b2a551df9ecad9ad0437987a0/types-redis-4.6.0.20240819.tar.gz", hash = "sha256:08f51f550ad41d0152bd98d77ac9d6d8f761369121710a213642f6036b9a7183", size = 49539 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fd/5d/f636b3bb65d52705abc9eb5832864dbd99d26ad1b1c2b5c2ff24af12249d/types_redis-4.6.0.20240819-py3-none-any.whl", hash = "sha256:86db9af6f0033154e12bc22c77236cef0907b995fda8c9f0f0eacd59943ed2fc", size = 58720 }, +] + +[[package]] +name = "types-setuptools" +version = "72.2.0.20240821" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/db/7b2c5190db1c74e42aebefa081f1f82741a2591a77e4e6471d52938ea15a/types-setuptools-72.2.0.20240821.tar.gz", hash = "sha256:e349b8015608879939f27ee370672f801287c46f5caa2d188d416336172c4965", size = 42123 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/d0/ff619a3202824e54d3b048fe7c34e5904b8e985196c6bcd5496e843e01b7/types_setuptools-72.2.0.20240821-py3-none-any.whl", hash = "sha256:260e89d6d3b42cc35f9f0f382d030713b7b547344a664c05c9175e6ba124fac7", size = 66596 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "tzdata" +version = "2024.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370 }, +] + +[[package]] +name = "ujson" +version = "5.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/00/3110fd566786bfa542adb7932d62035e0c0ef662a8ff6544b6643b3d6fd7/ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1", size = 7154885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/91/91678e49a9194f527e60115db84368c237ac7824992224fac47dcb23a5c6/ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd", size = 55354 }, + { url = "https://files.pythonhosted.org/packages/de/2f/1ed8c9b782fa4f44c26c1c4ec686d728a4865479da5712955daeef0b2e7b/ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf", size = 51808 }, + { url = "https://files.pythonhosted.org/packages/51/bf/a3a38b2912288143e8e613c6c4c3f798b5e4e98c542deabf94c60237235f/ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6", size = 51995 }, + { url = "https://files.pythonhosted.org/packages/b4/6d/0df8f7a6f1944ba619d93025ce468c9252aa10799d7140e07014dfc1a16c/ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569", size = 53566 }, + { url = "https://files.pythonhosted.org/packages/d5/ec/370741e5e30d5f7dc7f31a478d5bec7537ce6bfb7f85e72acefbe09aa2b2/ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770", size = 58499 }, + { url = "https://files.pythonhosted.org/packages/fe/29/72b33a88f7fae3c398f9ba3e74dc2e5875989b25f1c1f75489c048a2cf4e/ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1", size = 997881 }, + { url = "https://files.pythonhosted.org/packages/70/5c/808fbf21470e7045d56a282cf5e85a0450eacdb347d871d4eb404270ee17/ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5", size = 1140631 }, + { url = "https://files.pythonhosted.org/packages/8f/6a/e1e8281408e6270d6ecf2375af14d9e2f41c402ab6b161ecfa87a9727777/ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51", size = 1043511 }, + { url = "https://files.pythonhosted.org/packages/cb/ca/e319acbe4863919ec62498bc1325309f5c14a3280318dca10fe1db3cb393/ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518", size = 38626 }, + { url = "https://files.pythonhosted.org/packages/78/ec/dc96ca379de33f73b758d72e821ee4f129ccc32221f4eb3f089ff78d8370/ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f", size = 42076 }, + { url = "https://files.pythonhosted.org/packages/23/ec/3c551ecfe048bcb3948725251fb0214b5844a12aa60bee08d78315bb1c39/ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00", size = 55353 }, + { url = "https://files.pythonhosted.org/packages/8d/9f/4731ef0671a0653e9f5ba18db7c4596d8ecbf80c7922dd5fe4150f1aea76/ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126", size = 51813 }, + { url = 
"https://files.pythonhosted.org/packages/1f/2b/44d6b9c1688330bf011f9abfdb08911a9dc74f76926dde74e718d87600da/ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8", size = 51988 }, + { url = "https://files.pythonhosted.org/packages/29/45/f5f5667427c1ec3383478092a414063ddd0dfbebbcc533538fe37068a0a3/ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b", size = 53561 }, + { url = "https://files.pythonhosted.org/packages/26/21/a0c265cda4dd225ec1be595f844661732c13560ad06378760036fc622587/ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9", size = 58497 }, + { url = "https://files.pythonhosted.org/packages/28/36/8fde862094fd2342ccc427a6a8584fed294055fdee341661c78660f7aef3/ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f", size = 997877 }, + { url = "https://files.pythonhosted.org/packages/90/37/9208e40d53baa6da9b6a1c719e0670c3f474c8fc7cc2f1e939ec21c1bc93/ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4", size = 1140632 }, + { url = "https://files.pythonhosted.org/packages/89/d5/2626c87c59802863d44d19e35ad16b7e658e4ac190b0dead17ff25460b4c/ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1", size = 1043513 }, + { url = "https://files.pythonhosted.org/packages/2f/ee/03662ce9b3f16855770f0d70f10f0978ba6210805aa310c4eebe66d36476/ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f", size = 38616 }, + { url = 
"https://files.pythonhosted.org/packages/3e/20/952dbed5895835ea0b82e81a7be4ebb83f93b079d4d1ead93fcddb3075af/ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720", size = 42071 }, + { url = "https://files.pythonhosted.org/packages/e8/a6/fd3f8bbd80842267e2d06c3583279555e8354c5986c952385199d57a5b6c/ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5", size = 55642 }, + { url = "https://files.pythonhosted.org/packages/a8/47/dd03fd2b5ae727e16d5d18919b383959c6d269c7b948a380fdd879518640/ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e", size = 51807 }, + { url = "https://files.pythonhosted.org/packages/25/23/079a4cc6fd7e2655a473ed9e776ddbb7144e27f04e8fc484a0fb45fe6f71/ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043", size = 51972 }, + { url = "https://files.pythonhosted.org/packages/04/81/668707e5f2177791869b624be4c06fb2473bf97ee33296b18d1cf3092af7/ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1", size = 53686 }, + { url = "https://files.pythonhosted.org/packages/bd/50/056d518a386d80aaf4505ccf3cee1c40d312a46901ed494d5711dd939bc3/ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3", size = 58591 }, + { url = "https://files.pythonhosted.org/packages/fc/d6/aeaf3e2d6fb1f4cfb6bf25f454d60490ed8146ddc0600fae44bfe7eb5a72/ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21", size = 997853 }, + { url = 
"https://files.pythonhosted.org/packages/f8/d5/1f2a5d2699f447f7d990334ca96e90065ea7f99b142ce96e85f26d7e78e2/ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2", size = 1140689 }, + { url = "https://files.pythonhosted.org/packages/f2/2c/6990f4ccb41ed93744aaaa3786394bca0875503f97690622f3cafc0adfde/ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e", size = 1043576 }, + { url = "https://files.pythonhosted.org/packages/14/f5/a2368463dbb09fbdbf6a696062d0c0f62e4ae6fa65f38f829611da2e8fdd/ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e", size = 38764 }, + { url = "https://files.pythonhosted.org/packages/59/2d/691f741ffd72b6c84438a93749ac57bf1a3f217ac4b0ea4fd0e96119e118/ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc", size = 42211 }, + { url = "https://files.pythonhosted.org/packages/0d/69/b3e3f924bb0e8820bb46671979770c5be6a7d51c77a66324cdb09f1acddb/ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287", size = 55646 }, + { url = "https://files.pythonhosted.org/packages/32/8a/9b748eb543c6cabc54ebeaa1f28035b1bd09c0800235b08e85990734c41e/ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e", size = 51806 }, + { url = "https://files.pythonhosted.org/packages/39/50/4b53ea234413b710a18b305f465b328e306ba9592e13a791a6a6b378869b/ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557", size = 51975 }, + { url = 
"https://files.pythonhosted.org/packages/b4/9d/8061934f960cdb6dd55f0b3ceeff207fcc48c64f58b43403777ad5623d9e/ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988", size = 53693 }, + { url = "https://files.pythonhosted.org/packages/f5/be/7bfa84b28519ddbb67efc8410765ca7da55e6b93aba84d97764cd5794dbc/ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816", size = 58594 }, + { url = "https://files.pythonhosted.org/packages/48/eb/85d465abafb2c69d9699cfa5520e6e96561db787d36c677370e066c7e2e7/ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20", size = 997853 }, + { url = "https://files.pythonhosted.org/packages/9f/76/2a63409fc05d34dd7d929357b7a45e3a2c96f22b4225cd74becd2ba6c4cb/ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0", size = 1140694 }, + { url = "https://files.pythonhosted.org/packages/45/ed/582c4daba0f3e1688d923b5cb914ada1f9defa702df38a1916c899f7c4d1/ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f", size = 1043580 }, + { url = "https://files.pythonhosted.org/packages/d7/0c/9837fece153051e19c7bade9f88f9b409e026b9525927824cdf16293b43b/ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165", size = 38766 }, + { url = "https://files.pythonhosted.org/packages/d7/72/6cb6728e2738c05bbe9bd522d6fc79f86b9a28402f38663e85a28fddd4a0/ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539", size = 42212 }, + { url = 
"https://files.pythonhosted.org/packages/95/53/e5f5e733fc3525e65f36f533b0dbece5e5e2730b760e9beacf7e3d9d8b26/ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64", size = 51846 }, + { url = "https://files.pythonhosted.org/packages/59/1f/f7bc02a54ea7b47f3dc2d125a106408f18b0f47b14fc737f0913483ae82b/ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3", size = 48103 }, + { url = "https://files.pythonhosted.org/packages/1a/3a/d3921b6f29bc744d8d6c56db5f8bbcbe55115fd0f2b79c3c43ff292cc7c9/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a", size = 47257 }, + { url = "https://files.pythonhosted.org/packages/f1/04/f4e3883204b786717038064afd537389ba7d31a72b437c1372297cb651ea/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746", size = 48468 }, + { url = "https://files.pythonhosted.org/packages/17/cd/9c6547169eb01a22b04cbb638804ccaeb3c2ec2afc12303464e0f9b2ee5a/ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88", size = 54266 }, + { url = "https://files.pythonhosted.org/packages/70/bf/ecd14d3cf6127f8a990b01f0ad20e257f5619a555f47d707c57d39934894/ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b", size = 42224 }, +] + +[[package]] +name = "uritemplate" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/5a/4742fdba39cd02a56226815abfa72fe0aa81c33bed16ed045647d6000eba/uritemplate-4.1.1.tar.gz", hash = 
"sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0", size = 273898 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c0/7461b49cd25aeece13766f02ee576d1db528f1c37ce69aee300e075b485b/uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e", size = 10356 }, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/6d/fa469ae21497ddc8bc93e5877702dca7cb8f911e337aca7452b5724f1bb6/urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168", size = 292266 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/1c/89ffc63a9605b583d5df2be791a27bc1a42b7c32bab68d3c8f2f73a98cd4/urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", size = 121444 }, +] + +[[package]] +name = "usearch" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/7e/230db02465a09c70af49c71d8443e049a00647c8c35867b2d893b2e2610e/usearch-2.14.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f875f160463ccfcd120a636555f0821b3bd7bdb304ae936f30d603ec31ab0f7", size = 700510 }, + { url = "https://files.pythonhosted.org/packages/71/f5/024f1598a820cc94a106485e8d8a8a67b147465347ad655e6812cda0d313/usearch-2.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cfbdbae7f80e1e4cccd3ee1e9091d0c5894ed29d42ec0977286020c0f19854d8", size = 375928 }, + { url = 
"https://files.pythonhosted.org/packages/a8/2d/7e30169dc73efcf13b3d96bcfe067657d5aa5faf9550fbeccc43e297bfee/usearch-2.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5edc4de8c71e860f52d449672e89be22fa2c8bbd318caaac747ec05bfdc413b", size = 363694 }, + { url = "https://files.pythonhosted.org/packages/95/73/dda1b3baa8cd877466ece35a25ff9a3e9a0e593ceb2d9f20ddb289a1d39c/usearch-2.14.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7319f70f0e98b2a08afa9e3c4c8acb01b48691388702bf6bc5c10fa534d4fddf", size = 1262813 }, + { url = "https://files.pythonhosted.org/packages/be/3e/8eae798e4a4b38cb064fcbb6090f389f8ff27576b84eb3586db7788f51d6/usearch-2.14.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2b619b6e84a3ba16d14932dbe3c0af2d03b21cc57e12695b871cea1735e42f98", size = 1455375 }, + { url = "https://files.pythonhosted.org/packages/2c/7a/100de85aa1ee36e11dce3f794f7b4e72f0b3d68bd14555fdd9c15665892b/usearch-2.14.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdc09e961b281ed7622e4202cf4260ae043df10b27be2537f7b4d5ae1d4de7c3", size = 2196879 }, + { url = "https://files.pythonhosted.org/packages/d5/1a/14397058ee4a02669445fa6f7cb4eb9fa8816be10fd3b9c581613f9cd46e/usearch-2.14.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d73a4c63688696f217992e00962a7c93a46f823b9fc99b2733f8a44470b87b0d", size = 2325029 }, + { url = "https://files.pythonhosted.org/packages/e3/ac/e6e749271041b218f1929867a1fa70c7f59e4e4f4f0d9d1feb8dcd59e873/usearch-2.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:78f0acff1ca99eeda1304f3bd919b6fa42c5d7864ac424f3195846f1eff1bb33", size = 281028 }, + { url = "https://files.pythonhosted.org/packages/6f/c4/481aebb1481590a2fccfe6312457fa0f9f6258ace50ebaf17025741ed995/usearch-2.14.0-cp310-cp310-win_arm64.whl", hash = "sha256:1de0bc99cca6e8ff67f6fed85bc9a82ab8542e4eeed32a754bf76700bdfa32bc", size = 261013 }, + { url = 
"https://files.pythonhosted.org/packages/b6/e3/a1c25e540fa805d33562e7919481a5df901a406fe905c8e280fb45edb658/usearch-2.14.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6cbc5a2e511f212dea10e799b60b9639fae4c0eddd3bef36b62a24d03b3157ac", size = 704460 }, + { url = "https://files.pythonhosted.org/packages/da/d6/f53f7706fe2c2022d5a3716689de280338fec4332c37266ccb018f5794f9/usearch-2.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2454f7b73b3ba955e01e3500477aaf45636527f8ef41c90064e042f0e503c884", size = 377627 }, + { url = "https://files.pythonhosted.org/packages/97/7f/68b1e53438ceb88bf933a392f9c7a6752e87008d65e6bd0642cb5286dd7f/usearch-2.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:53db1ab686c0a70cf828d1566cc79d65e0fbe10637fc0db1e176d4cfe586a27c", size = 364731 }, + { url = "https://files.pythonhosted.org/packages/72/b4/de152d16f0ea8b9051392f9438c0f9319ee42f1aee3dcf84dfbc3e33eb82/usearch-2.14.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:170ff8d97285a0679f5a14f32c72fc507f3d9227073c874d80b5040fa42c23ba", size = 1268817 }, + { url = "https://files.pythonhosted.org/packages/3f/db/f99d1e6a31be07e4037878326f4cffffa93e6315586b51f85c1fc30a4182/usearch-2.14.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:dd16b87467088785062b4491aa428e549cea18efe2d0c2122aaedd3540e61969", size = 1461166 }, + { url = "https://files.pythonhosted.org/packages/22/3d/0733928b43de8dd37788fde6bec4ea98255ed98b25cbda68a6e1321bdabd/usearch-2.14.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fef2b58ce1891ebdf8c5a10ea330a5eb62ead482dfc85fb68c4ef2f7045a3b0c", size = 2199208 }, + { url = "https://files.pythonhosted.org/packages/87/72/fd55703fdf0f47f644a80aac15418d3893233f4ffa64f0a01361ddbca015/usearch-2.14.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:688b8693378c39fcfec54593a08a6ffa905cb4c60cf7bc7be0f05bef8fd5cd79", size = 2327971 }, + { url = 
"https://files.pythonhosted.org/packages/44/1d/ab643bb6c801248c41df66ea216e81714d51749c51a38eab70d34c60ae5f/usearch-2.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:ae5af33960cbf46f7133417bacaf35273b71b3974e40a259ad18f8c1580b77c0", size = 281791 }, + { url = "https://files.pythonhosted.org/packages/3a/51/f943fb593e215b8bc9c03cafbfc21973de489397f66809911f51ccd4e8a7/usearch-2.14.0-cp311-cp311-win_arm64.whl", hash = "sha256:0f4343d30b936280dea119fecca2ab825c4118ae42bf7bf682f0dc64e870e716", size = 262136 }, + { url = "https://files.pythonhosted.org/packages/ba/2d/7b281e8a47cd6a9bf46554ed8d2a9117f2b5dd1abf7922dfb18dc8c556a9/usearch-2.14.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:290f7040833b4ff4e0182cd4d7f9c26ba98d43f54a8df512ff925f0cf636051b", size = 710202 }, + { url = "https://files.pythonhosted.org/packages/24/66/16a5f91707e2faf3ac7ab27d742b53247647e4a8c05cbc87cb32e1c04b67/usearch-2.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c2c68e6d205c6a8c16ee6af4c8e09a65cfbef52a6fe6b9aa2c9b1946dcc9970", size = 381509 }, + { url = "https://files.pythonhosted.org/packages/14/f4/dc2fe99e9a1fb786edb9a7b76748a05d0e4d05c09619f88d79bc8ddf6348/usearch-2.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3ff38f82dd452875d9b2e07d774f86a47cadcf910b4caa76a78c05084efdfd06", size = 366262 }, + { url = "https://files.pythonhosted.org/packages/de/4b/aab1a4270abea0daa7a8f025d27bbfb34da41aaaa4b3487d407389387711/usearch-2.14.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c27acd1ae6d2cc79e7909058562ae55ceb919a97af6f9fd5d769e3dc929aa60", size = 1269220 }, + { url = "https://files.pythonhosted.org/packages/ad/6b/2d225adc6c4cc06c3fb7f0cd0a0600122f117c32e3dc41c92735cbe07c02/usearch-2.14.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:51f4a8c88c0f23cd9e8e2da56cb63e1e605918af8cd3bea5e9b0200188c1c1b6", size = 1465916 }, + { url = 
"https://files.pythonhosted.org/packages/f9/b1/f177a828256e2595bdfdc658fdd73ffad840d26bcbc0191f6d5d1062f824/usearch-2.14.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5c298f73a60f96a50661e7815e9f1917b80cfd3cb3ba17724cd2a7974bf3db4e", size = 2197827 }, + { url = "https://files.pythonhosted.org/packages/86/38/495db24f48ceab3028dbd4584a7abd265c68f9a568f003b7860cef591049/usearch-2.14.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afb98f7cf8a6e8a849150a39d767c25c5e74d049d03dc96dc630863902163a84", size = 2334400 }, + { url = "https://files.pythonhosted.org/packages/d6/05/3154e5d365fb518fd846ba88fc85f9cd87ad96541bdcf2ce55fce6652003/usearch-2.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:610d886732f3bbafbe5e840525ab10711c376d686a703240e3ce721a636fc12b", size = 283005 }, + { url = "https://files.pythonhosted.org/packages/ba/ca/31efa3416a04c18e24cc05aa06c1e09487dd13a4e17e757800606809bce6/usearch-2.14.0-cp312-cp312-win_arm64.whl", hash = "sha256:f59a50964f1a2bd457adea1b35644fad4e15d05499d86a5f323d7c92e50185b6", size = 263231 }, +] + +[[package]] +name = "uvicorn" +version = "0.30.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "h11", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "typing-extensions", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/01/5e637e7aa9dd031be5376b9fb749ec20b86f5a5b6a49b87fabd374d5fa9f/uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788", size = 42825 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f5/8e/cdc7d6263db313030e4c257dd5ba3909ebc4e4fb53ad62d5f09b1a2f5458/uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5", size = 62835 }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "uvloop", marker = "(platform_python_implementation != 'PyPy' and sys_platform == 'darwin') or (platform_python_implementation != 'PyPy' and sys_platform == 'linux')" }, + { name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "websockets", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] + +[[package]] +name = "uvloop" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/f1/dc9577455e011ad43d9379e836ee73f40b4f99c02946849a44f7ae64835e/uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469", size = 2329938 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/69/cc1ad125ea8ce4a4d3ba7d9836062c3fc9063cf163ddf0f168e73f3268e3/uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996", size = 1363922 }, + { url = "https://files.pythonhosted.org/packages/f7/45/5a3f7a32372e4a90dfd83f30507183ec38990b8c5930ed7e36c6a15af47b/uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b", size = 760386 }, + { url = "https://files.pythonhosted.org/packages/9e/a5/9e973b25ade12c938940751bce71d0cb36efee3489014471f7d9c0a3c379/uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10", size = 3432586 }, + { url = "https://files.pythonhosted.org/packages/a9/e0/0bec8a25b2e9cf14fdfcf0229637b437c923b4e5ca22f8e988363c49bb51/uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae", size = 3431802 }, + { url = "https://files.pythonhosted.org/packages/95/3b/14cef46dcec6237d858666a4a1fdb171361528c70fcd930bfc312920e7a9/uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006", size = 4144444 }, + { url = "https://files.pythonhosted.org/packages/9d/5a/0ac516562ff783f760cab3b061f10fdeb4a9f985ad4b44e7e4564ff11691/uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73", size = 4147039 }, + { url = "https://files.pythonhosted.org/packages/64/bf/45828beccf685b7ed9638d9b77ef382b470c6ca3b5bff78067e02ffd5663/uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037", size = 1320593 }, + { url = "https://files.pythonhosted.org/packages/27/c0/3c24e50bee7802a2add96ca9f0d5eb0ebab07e0a5615539d38aeb89499b9/uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9", size = 736676 }, + { url = "https://files.pythonhosted.org/packages/83/ce/ffa3c72954eae36825acfafd2b6a9221d79abd2670c0d25e04d6ef4a2007/uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e", size = 3494573 }, + { url = "https://files.pythonhosted.org/packages/46/6d/4caab3a36199ba52b98d519feccfcf48921d7a6649daf14a93c7e77497e9/uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756", size = 3489932 }, + { url = "https://files.pythonhosted.org/packages/e4/4f/49c51595bd794945c88613df88922c38076eae2d7653f4624aa6f4980b07/uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0", size = 4185596 }, + { url = "https://files.pythonhosted.org/packages/b8/94/7e256731260d313f5049717d1c4582d52a3b132424c95e16954a50ab95d3/uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf", size = 4185746 }, + { url = "https://files.pythonhosted.org/packages/2d/64/31cbd379d6e260ac8de3f672f904e924f09715c3f192b09f26cc8e9f574c/uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d", size = 1324302 }, + { url = "https://files.pythonhosted.org/packages/1e/6b/9207e7177ff30f78299401f2e1163ea41130d4fd29bcdc6d12572c06b728/uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e", size = 738105 }, + { url = "https://files.pythonhosted.org/packages/c1/ba/b64b10f577519d875992dc07e2365899a1a4c0d28327059ce1e1bdfb6854/uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9", size = 4090658 }, + { url = "https://files.pythonhosted.org/packages/0a/f8/5ceea6876154d926604f10c1dd896adf9bce6d55a55911364337b8a5ed8d/uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab", size = 4173357 }, + { url = "https://files.pythonhosted.org/packages/18/b2/117ab6bfb18274753fbc319607bf06e216bd7eea8be81d5bac22c912d6a7/uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5", size = 4029868 }, + { url = "https://files.pythonhosted.org/packages/6f/52/deb4be09060637ef4752adaa0b75bf770c20c823e8108705792f99cd4a6f/uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00", size = 4115980 }, +] + +[[package]] +name = "validators" +version = "0.33.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5d/af/5ad4fed95276e3eb7628d858c88cd205799bcad847e46223760a3129cbb1/validators-0.33.0.tar.gz", hash = "sha256:535867e9617f0100e676a1257ba1e206b9bfd847ddc171e4d44811f07ff0bfbf", size = 70741 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/22/91b4bd36df27e651daedd93d03d5d3bb6029fdb0b55494e45ee46c36c570/validators-0.33.0-py3-none-any.whl", hash = "sha256:134b586a98894f8139865953899fc2daeb3d0c35569552c5518f089ae43ed075", size = 43298 }, +] + +[[package]] +name = "virtualenv" +version = "20.26.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "platformdirs", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/60/db9f95e6ad456f1872486769c55628c7901fb4de5a72c2f7bdd912abf0c1/virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a", size = 9057588 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/07/4d/410156100224c5e2f0011d435e477b57aed9576fc7fe137abcf14ec16e11/virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589", size = 5684792 }, +] + +[[package]] +name = "watchfiles" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/1a/b06613ef620d7f5ca712a3d4928ec1c07182159a64277fcdf7738edb0b32/watchfiles-0.23.0.tar.gz", hash = "sha256:9338ade39ff24f8086bb005d16c29f8e9f19e55b18dcb04dfa26fcbc09da497b", size = 37384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/08/c0a09fc63a6b75fccd3e99b21f07ddb812e64a78da10703397b39653263e/watchfiles-0.23.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bee8ce357a05c20db04f46c22be2d1a2c6a8ed365b325d08af94358e0688eeb4", size = 374424 }, + { url = "https://files.pythonhosted.org/packages/8c/ff/2b338016e96ab592e8d9cece0260b9fca54d8bed7b36940c46112eda2e49/watchfiles-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ccd3011cc7ee2f789af9ebe04745436371d36afe610028921cab9f24bb2987b", size = 369288 }, + { url = "https://files.pythonhosted.org/packages/43/cf/747f412b75ea4bb5419e659ae8b2713a327b6f879e3f2e0695c7d7275cf3/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb02d41c33be667e6135e6686f1bb76104c88a312a18faa0ef0262b5bf7f1a0f", size = 441212 }, + { url = "https://files.pythonhosted.org/packages/c6/64/07f4c50883f1406e5a4187651b4d3d2495582df9f70d320ee3c9e7ed19be/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf12ac34c444362f3261fb3ff548f0037ddd4c5bb85f66c4be30d2936beb3c5", size = 437765 }, + { url = 
"https://files.pythonhosted.org/packages/d8/e9/4af3cfb2eb161003ce79518eb0cbfd014313e30dc842209776fbee3a64e6/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0b2c25040a3c0ce0e66c7779cc045fdfbbb8d59e5aabfe033000b42fe44b53e", size = 456151 }, + { url = "https://files.pythonhosted.org/packages/b4/45/d97e61c893fc59d4b0c4154fdf26449e103a3783db4d981bb8e7301af532/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf2be4b9eece4f3da8ba5f244b9e51932ebc441c0867bd6af46a3d97eb068d6", size = 472320 }, + { url = "https://files.pythonhosted.org/packages/f2/55/7266cd63e736abbde902cbd99fb70a8eddfac7e0e52ed52fbbee6eed5f95/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40cb8fa00028908211eb9f8d47744dca21a4be6766672e1ff3280bee320436f1", size = 480442 }, + { url = "https://files.pythonhosted.org/packages/22/ec/c756c012b174ccf5f2ee32202603e66b33b93a54cf16c69a7440c764d7f9/watchfiles-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f48c917ffd36ff9a5212614c2d0d585fa8b064ca7e66206fb5c095015bc8207", size = 427729 }, + { url = "https://files.pythonhosted.org/packages/78/94/97ac8d7a19f5439ab5cc28d0b5d648760358e43097f6acb8cb7165c4c1b7/watchfiles-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9d183e3888ada88185ab17064079c0db8c17e32023f5c278d7bf8014713b1b5b", size = 616359 }, + { url = "https://files.pythonhosted.org/packages/fc/c9/568a54e07245a068819572a7d51c7d2f6ff8e7018102e956156fadae408c/watchfiles-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9837edf328b2805346f91209b7e660f65fb0e9ca18b7459d075d58db082bf981", size = 597955 }, + { url = "https://files.pythonhosted.org/packages/fa/2c/ba3e9d54c17a4014996555a0b31f4be1c8920fdfe067942f60873ac8931a/watchfiles-0.23.0-cp310-none-win32.whl", hash = "sha256:296e0b29ab0276ca59d82d2da22cbbdb39a23eed94cca69aed274595fb3dfe42", size = 264290 }, + { url = 
"https://files.pythonhosted.org/packages/6a/a1/d7aa3f23c78b2c9cac5d1cf46fed5f74340d0ffd47c0f485b76419ec6597/watchfiles-0.23.0-cp310-none-win_amd64.whl", hash = "sha256:4ea756e425ab2dfc8ef2a0cb87af8aa7ef7dfc6fc46c6f89bcf382121d4fff75", size = 275914 }, + { url = "https://files.pythonhosted.org/packages/14/5f/787386438d895145099e1415d1fbd3ff047a4f5e329134fd30677fe83f1f/watchfiles-0.23.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e397b64f7aaf26915bf2ad0f1190f75c855d11eb111cc00f12f97430153c2eab", size = 374801 }, + { url = "https://files.pythonhosted.org/packages/76/6f/3075cd9c69fdce2544fb13cb9e3c8ad51424cb2c552b019514799a14966e/watchfiles-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4ac73b02ca1824ec0a7351588241fd3953748d3774694aa7ddb5e8e46aef3e3", size = 368210 }, + { url = "https://files.pythonhosted.org/packages/ab/6b/cd4faa27088a8b612ffdfa25e3d413e676a6173b8b02a33e7fec152d75ca/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130a896d53b48a1cecccfa903f37a1d87dbb74295305f865a3e816452f6e49e4", size = 441356 }, + { url = "https://files.pythonhosted.org/packages/39/ba/d361135dac6cd0fb4449f4f058c053eb9b42f70ff4d9a13767808e18851c/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5e7803a65eb2d563c73230e9d693c6539e3c975ccfe62526cadde69f3fda0cf", size = 437615 }, + { url = "https://files.pythonhosted.org/packages/34/2c/c279de01628f467d16b444bdcedf9c4ce3bc5242cb23f9bfb8fbff8522ee/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1aa4cc85202956d1a65c88d18c7b687b8319dbe6b1aec8969784ef7a10e7d1a", size = 456227 }, + { url = "https://files.pythonhosted.org/packages/a4/9f/a3c9f1fbcd1099554e4f707e14473ff23f0e05013d553755b98c2d86716d/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87f889f6e58849ddb7c5d2cb19e2e074917ed1c6e3ceca50405775166492cca8", size = 472219 }, + { url = 
"https://files.pythonhosted.org/packages/22/ee/06a0a6cbde8ac6fff57c33da9e428f42dd0989e60a6ad72ca6534f650a47/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37fd826dac84c6441615aa3f04077adcc5cac7194a021c9f0d69af20fb9fa788", size = 479948 }, + { url = "https://files.pythonhosted.org/packages/b9/f0/76ad5227da9461b1190de2f9dd21fece09660a9a44607de9c728f3d3e93f/watchfiles-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee7db6e36e7a2c15923072e41ea24d9a0cf39658cb0637ecc9307b09d28827e1", size = 427559 }, + { url = "https://files.pythonhosted.org/packages/e1/15/daf4361e0a6e6b27f516aaaacbb16baa8d1a266657b2314862fc73f2deaf/watchfiles-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2368c5371c17fdcb5a2ea71c5c9d49f9b128821bfee69503cc38eae00feb3220", size = 616447 }, + { url = "https://files.pythonhosted.org/packages/b3/e4/2647ca9aaa072e139a4cc6c83c8a15d2f8fa6740913903ab998917a5ed97/watchfiles-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:857af85d445b9ba9178db95658c219dbd77b71b8264e66836a6eba4fbf49c320", size = 598031 }, + { url = "https://files.pythonhosted.org/packages/3d/02/f223537cd0e3c22df45629710b27b7f89fdf4114be2f3399b83faedf1446/watchfiles-0.23.0-cp311-none-win32.whl", hash = "sha256:1d636c8aeb28cdd04a4aa89030c4b48f8b2954d8483e5f989774fa441c0ed57b", size = 264354 }, + { url = "https://files.pythonhosted.org/packages/03/31/c1b5ea92100d9774f5a8a89115a43ef1c4fb169b643b6cc930e0cd2c5728/watchfiles-0.23.0-cp311-none-win_amd64.whl", hash = "sha256:46f1d8069a95885ca529645cdbb05aea5837d799965676e1b2b1f95a4206313e", size = 275821 }, + { url = "https://files.pythonhosted.org/packages/23/9c/810ede8d4dff7e65393b50cbb1a3ef10b6cdb1312a97d8106712175355c8/watchfiles-0.23.0-cp311-none-win_arm64.whl", hash = "sha256:e495ed2a7943503766c5d1ff05ae9212dc2ce1c0e30a80d4f0d84889298fa304", size = 266906 }, + { url = 
"https://files.pythonhosted.org/packages/61/52/85cdf326a53f1ae3fbe5dcab13f5729ca91ec2d61140e095a2a4cdf6a9ca/watchfiles-0.23.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1db691bad0243aed27c8354b12d60e8e266b75216ae99d33e927ff5238d270b5", size = 373314 }, + { url = "https://files.pythonhosted.org/packages/20/5e/a97417a6544615b21c7960a45aeea13e3b42779e0ed3ebdd2d76ad62ab50/watchfiles-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62d2b18cb1edaba311fbbfe83fb5e53a858ba37cacb01e69bc20553bb70911b8", size = 368915 }, + { url = "https://files.pythonhosted.org/packages/bc/82/537945ed624af6248c9820a99cbfd5902bb5e6a71a01a5b3de0c00f1872e/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e087e8fdf1270d000913c12e6eca44edd02aad3559b3e6b8ef00f0ce76e0636f", size = 441495 }, + { url = "https://files.pythonhosted.org/packages/28/24/060b064f28083866d916052fcced5c3547c5081a8e27b0702434666aa9a0/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd41d5c72417b87c00b1b635738f3c283e737d75c5fa5c3e1c60cd03eac3af77", size = 437357 }, + { url = "https://files.pythonhosted.org/packages/b6/00/ac760f3fa8d8975dbeaef9af99b21077e7c38898ac5051c8601649d86d99/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5f3ca0ff47940ce0a389457b35d6df601c317c1e1a9615981c474452f98de1", size = 456584 }, + { url = "https://files.pythonhosted.org/packages/f7/52/2f7bbedc5f524d2ba0e9d792dab01ef4418d0f5045a9f5f4e5aca142a30d/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6991e3a78f642368b8b1b669327eb6751439f9f7eaaa625fae67dd6070ecfa0b", size = 471863 }, + { url = "https://files.pythonhosted.org/packages/b1/64/a80f51cb55c967629930682bf120d5ca9d1c65077c38328be635ed0d567c/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f7252f52a09f8fa5435dc82b6af79483118ce6bd51eb74e6269f05ee22a7b9f", size 
= 478307 }, + { url = "https://files.pythonhosted.org/packages/03/f1/fdacfdbffb0635a7d0140ecca6ef7b5bce6566a085f76a65eb796ee54ddd/watchfiles-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e01bcb8d767c58865207a6c2f2792ad763a0fe1119fb0a430f444f5b02a5ea0", size = 427117 }, + { url = "https://files.pythonhosted.org/packages/d1/23/89b2bef692c350de8a4c2bde501fdf6087889a55f52a3201f0c53b616087/watchfiles-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e56fbcdd27fce061854ddec99e015dd779cae186eb36b14471fc9ae713b118c", size = 616352 }, + { url = "https://files.pythonhosted.org/packages/2c/35/a683945181a527083a1146620997b5d6ffe06d716c4497d388bfea813f0c/watchfiles-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bd3e2d64500a6cad28bcd710ee6269fbeb2e5320525acd0cfab5f269ade68581", size = 597165 }, + { url = "https://files.pythonhosted.org/packages/9e/9b/ec2eabc996e5332fc89c633fbe762e08a58a7df6b5e595dd458c5f7778a4/watchfiles-0.23.0-cp312-none-win32.whl", hash = "sha256:eb99c954291b2fad0eff98b490aa641e128fbc4a03b11c8a0086de8b7077fb75", size = 264293 }, + { url = "https://files.pythonhosted.org/packages/e0/3a/62add8d90070f4b17f8bbfd66c9eaa9e08af3bc4020c07a9400d1b959aaf/watchfiles-0.23.0-cp312-none-win_amd64.whl", hash = "sha256:dccc858372a56080332ea89b78cfb18efb945da858fabeb67f5a44fa0bcb4ebb", size = 275514 }, + { url = "https://files.pythonhosted.org/packages/e8/9a/2792d4c24105104bfaf959bffefb09e02d14050913a83242ce4eb1e3f2ff/watchfiles-0.23.0-cp312-none-win_arm64.whl", hash = "sha256:6c21a5467f35c61eafb4e394303720893066897fca937bade5b4f5877d350ff8", size = 266607 }, + { url = "https://files.pythonhosted.org/packages/f6/5b/1a1d9bca4eae8cf191e74b62cd970f4a010f56f897c11dd2e6caef3ce7e3/watchfiles-0.23.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ba31c32f6b4dceeb2be04f717811565159617e28d61a60bb616b6442027fd4b9", size = 372999 }, + { url = 
"https://files.pythonhosted.org/packages/98/e1/76ad010c0a2bb6efbb80383c0bba56db065238f12b0da6e6026b4e69f6aa/watchfiles-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85042ab91814fca99cec4678fc063fb46df4cbb57b4835a1cc2cb7a51e10250e", size = 368511 }, + { url = "https://files.pythonhosted.org/packages/a1/13/d2d59d545b84fd3cf4f08b69da358209b4276c2c932d060d94a421015074/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24655e8c1c9c114005c3868a3d432c8aa595a786b8493500071e6a52f3d09217", size = 441063 }, + { url = "https://files.pythonhosted.org/packages/4b/d1/dab28bed3bc9172d44100e5fae8107bd01ef85fc6bddb80d223d0d9f709f/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b1a950ab299a4a78fd6369a97b8763732bfb154fdb433356ec55a5bce9515c1", size = 436805 }, + { url = "https://files.pythonhosted.org/packages/06/9c/46e0d17853b62b5d4bf8095e7b9bb0b0ad4babb6c6133138929473f161f3/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8d3c5cd327dd6ce0edfc94374fb5883d254fe78a5e9d9dfc237a1897dc73cd1", size = 456411 }, + { url = "https://files.pythonhosted.org/packages/2c/ff/e891b230bcf3a648352a00b920d4a1142a938f0b97c9e8e27c2eaaeda221/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ff785af8bacdf0be863ec0c428e3288b817e82f3d0c1d652cd9c6d509020dd0", size = 471563 }, + { url = "https://files.pythonhosted.org/packages/0b/07/f5b54afa8b7c33386c5778d92e681562939900f4ee1c6de9bffc49e7221f/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b7ba9d4557149410747353e7325010d48edcfe9d609a85cb450f17fd50dc3d", size = 478385 }, + { url = "https://files.pythonhosted.org/packages/a3/b6/243c1dd351ac9b8258a3ea99c33d04ecdc9766e6c7f13a43452883e92a7a/watchfiles-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:48a1b05c0afb2cd2f48c1ed2ae5487b116e34b93b13074ed3c22ad5c743109f0", size = 427485 }, + { url = "https://files.pythonhosted.org/packages/28/8a/6d00aa4aa9a9938de645c1d411e3af82e74db8d25a0c05427b7a88b4d8d3/watchfiles-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:109a61763e7318d9f821b878589e71229f97366fa6a5c7720687d367f3ab9eef", size = 615839 }, + { url = "https://files.pythonhosted.org/packages/5a/d9/120d212d2952342e2c9673096f5c17cd48e90a7c9ff203ab1ad2f974befe/watchfiles-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9f8e6bb5ac007d4a4027b25f09827ed78cbbd5b9700fd6c54429278dacce05d1", size = 596603 }, + { url = "https://files.pythonhosted.org/packages/3b/25/ec3676b140a93ac256d058a6f82810cf5e0e42fd444b948c62bc56f57f52/watchfiles-0.23.0-cp313-none-win32.whl", hash = "sha256:f46c6f0aec8d02a52d97a583782d9af38c19a29900747eb048af358a9c1d8e5b", size = 263898 }, + { url = "https://files.pythonhosted.org/packages/1a/c6/bf3b8cbe6944499fbe0d400175560a200cdecadccbacc8ace74486565d74/watchfiles-0.23.0-cp313-none-win_amd64.whl", hash = "sha256:f449afbb971df5c6faeb0a27bca0427d7b600dd8f4a068492faec18023f0dcff", size = 275220 }, + { url = "https://files.pythonhosted.org/packages/f7/7c/135a60260dd055227eb3b38f0be5fc16409ad58c5c6636467b27991fd863/watchfiles-0.23.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9265cf87a5b70147bfb2fec14770ed5b11a5bb83353f0eee1c25a81af5abfe", size = 376161 }, + { url = "https://files.pythonhosted.org/packages/5c/25/6511ed7bc826ddc2a4e879cf469621a1184719e97d63e7f723e95991ebd3/watchfiles-0.23.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9f02a259fcbbb5fcfe7a0805b1097ead5ba7a043e318eef1db59f93067f0b49b", size = 369829 }, + { url = "https://files.pythonhosted.org/packages/73/d3/00d561a66aa000251ed598f576e8bfd1c4102f9956fc06310e9b53258d3e/watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1ebaebb53b34690da0936c256c1cdb0914f24fb0e03da76d185806df9328abed", size = 443386 }, + { url = "https://files.pythonhosted.org/packages/00/24/1c089457e39a0e6a142df8cb795a690b71f05c948bc60df4ec12359956b8/watchfiles-0.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd257f98cff9c6cb39eee1a83c7c3183970d8a8d23e8cf4f47d9a21329285cee", size = 429214 }, +] + +[[package]] +name = "wcwidth" +version = "0.2.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, +] + +[[package]] +name = "weaviate-client" +version = "4.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "authlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio-health-checking", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "grpcio-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "httpx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "validators", 
marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/1e/68cd8306f0c9bf617c79096316e6a4a59edc3a6c2e1171daab661fd0cf0e/weaviate_client-4.7.1.tar.gz", hash = "sha256:af99ac4e53613d2ff5b797372e95d004d0c8a1dd10a7f592068bcb423a30af30", size = 676797 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/75/5c90f12d228ed9d02a3a4447f5f769e966d9885b6d0d0276ecf1f73a4609/weaviate_client-4.7.1-py3-none-any.whl", hash = "sha256:342f5c67b126cee4dc3a60467ad1ae74971cd5614e27af6fb13d687a345352c4", size = 368293 }, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 }, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 }, +] + +[[package]] +name = "websockets" +version = "13.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/0f/b0/e53bdd53d86447d211694f3cf66f163d077c5d68e6bcaa726bf64e88ae3a/websockets-13.0.tar.gz", hash = "sha256:b7bf950234a482b7461afdb2ec99eee3548ec4d53f418c7990bb79c620476602", size = 147622 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/cc/8b3007ecf2d4e423251b2b3606c276e3fe85298982fc4fd0785a17b73ffb/websockets-13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad4fa707ff9e2ffee019e946257b5300a45137a58f41fbd9a4db8e684ab61528", size = 150919 }, + { url = "https://files.pythonhosted.org/packages/1b/17/44553bd98608378b0d17432431a0f8f4633a6799826418f93ac036125000/websockets-13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6fd757f313c13c34dae9f126d3ba4cf97175859c719e57c6a614b781c86b617e", size = 148574 }, + { url = "https://files.pythonhosted.org/packages/ee/38/ac6d8f50dc8ac81c29036d6d26aafae3fcbb43cfe88e8bc35a0e6af24525/websockets-13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cbac2eb7ce0fac755fb983c9247c4a60c4019bcde4c0e4d167aeb17520cc7ef1", size = 148830 }, + { url = "https://files.pythonhosted.org/packages/6e/6e/b831097bb1843200d8636245f45fb8daaf4512329e8036f0f0b7ecd80f1c/websockets-13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4b83cf7354cbbc058e97b3e545dceb75b8d9cf17fd5a19db419c319ddbaaf7a", size = 157909 }, + { url = "https://files.pythonhosted.org/packages/e0/d9/4ceef7fb370eca3c33d02966e972c08ef49073199ad02ef9f0f9f2f6f107/websockets-13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9202c0010c78fad1041e1c5285232b6508d3633f92825687549540a70e9e5901", size = 156920 }, + { url = "https://files.pythonhosted.org/packages/b0/b1/8fb8bfad33f01d9085934c39bf5171c372edebed4c5440b28cb3270c0d56/websockets-13.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3e6566e79c8c7cbea75ec450f6e1828945fc5c9a4769ceb1c7b6e22470539712", size = 157233 }, + { url = "https://files.pythonhosted.org/packages/ad/0a/1dbe4f15cb2fc6d2efea9e7c55651102dc52a10d34d322c0af8d332592be/websockets-13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e7fcad070dcd9ad37a09d89a4cbc2a5e3e45080b88977c0da87b3090f9f55ead", size = 157632 }, + { url = "https://files.pythonhosted.org/packages/9f/97/4e7e98b694ef3db9a9776cbc4f72121cf408c47d7bc1ec582cfd9fa16de1/websockets-13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8f7d65358a25172db00c69bcc7df834155ee24229f560d035758fd6613111a", size = 157049 }, + { url = "https://files.pythonhosted.org/packages/4f/8c/50c0b58e99a7dc19282b706b99316327380065d8b2325aa0c7ae0479a98a/websockets-13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:63b702fb31e3f058f946ccdfa551f4d57a06f7729c369e8815eb18643099db37", size = 157001 }, + { url = "https://files.pythonhosted.org/packages/a4/6f/cf602a9addf38396a2543bcd2c120651324169c0e88aa68a86a9b1e1f648/websockets-13.0-cp310-cp310-win32.whl", hash = "sha256:3a20cf14ba7b482c4a1924b5e061729afb89c890ca9ed44ac4127c6c5986e424", size = 151754 }, + { url = "https://files.pythonhosted.org/packages/d0/bf/4a0bab951456884638c9bbf4a6b5314e755217632be8da831f8b3c6d3954/websockets-13.0-cp310-cp310-win_amd64.whl", hash = "sha256:587245f0704d0bb675f919898d7473e8827a6d578e5a122a21756ca44b811ec8", size = 152192 }, + { url = "https://files.pythonhosted.org/packages/12/29/9fdf8a7f1ced2bac55d36e0b879991498c9858f1e524763434025948d254/websockets-13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:06df8306c241c235075d2ae77367038e701e53bc8c1bb4f6644f4f53aa6dedd0", size = 150915 }, + { url = "https://files.pythonhosted.org/packages/b9/27/723276e7fcb41a3e0859e347014e3e24637982a29222132746b98095ec02/websockets-13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85a1f92a02f0b8c1bf02699731a70a8a74402bb3f82bee36e7768b19a8ed9709", size = 148575 }, + { url = 
"https://files.pythonhosted.org/packages/04/54/39b1f809e34f78ebb1dcb9cf57465db9705bbf59f30bd1b3b381272dff2b/websockets-13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9ed02c604349068d46d87ef4c2012c112c791f2bec08671903a6bb2bd9c06784", size = 148825 }, + { url = "https://files.pythonhosted.org/packages/fe/df/0a8a90162c32ceb9f28415291c1d689310b503288d29169302964105a351/websockets-13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b89849171b590107f6724a7b0790736daead40926ddf47eadf998b4ff51d6414", size = 158482 }, + { url = "https://files.pythonhosted.org/packages/20/05/227dbb1861cd1e2eb04ac79b136da841dbf6f196e4dc0bd1e67edb4ee69d/websockets-13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:939a16849d71203628157a5e4a495da63967c744e1e32018e9b9e2689aca64d4", size = 157478 }, + { url = "https://files.pythonhosted.org/packages/fe/dd/3384d3eb26022703895d6ed65aec2d3af6976c3d9aed06200a322e7192cb/websockets-13.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad818cdac37c0ad4c58e51cb4964eae4f18b43c4a83cb37170b0d90c31bd80cf", size = 157855 }, + { url = "https://files.pythonhosted.org/packages/93/ad/0320a24cd8309e1a257d43d762a732162f2956b769c1ad950b70d4d4d15a/websockets-13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cbfe82a07596a044de78bb7a62519e71690c5812c26c5f1d4b877e64e4f46309", size = 158160 }, + { url = "https://files.pythonhosted.org/packages/d0/33/acc24e576228301d1dc23ce9d3f7d20f51dfe6c16d1b241e6ba4b2904d3e/websockets-13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e07e76c49f39c5b45cbd7362b94f001ae209a3ea4905ae9a09cfd53b3c76373d", size = 157598 }, + { url = "https://files.pythonhosted.org/packages/83/47/01645a0ea041e32a9d8946a324845beb8daba2e2f00ee4fd2d04d3ceb598/websockets-13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:372f46a0096cfda23c88f7e42349a33f8375e10912f712e6b496d3a9a557290f", size = 157548 }, + { url = "https://files.pythonhosted.org/packages/73/89/ea73bc41934eb3ea3f0c04fa7b16455ec5925b8b72aa5e016bd22df5feb5/websockets-13.0-cp311-cp311-win32.whl", hash = "sha256:376a43a4fd96725f13450d3d2e98f4f36c3525c562ab53d9a98dd2950dca9a8a", size = 151756 }, + { url = "https://files.pythonhosted.org/packages/9b/b1/81f655476532b31c39814d55a1dc1e97ecedc5a1b4f9517ee665aec398f6/websockets-13.0-cp311-cp311-win_amd64.whl", hash = "sha256:2be1382a4daa61e2f3e2be3b3c86932a8db9d1f85297feb6e9df22f391f94452", size = 152200 }, + { url = "https://files.pythonhosted.org/packages/ad/0a/baeea2931827e73ebe3d958fad9df74ec66d08341d0cf701ced0381adc91/websockets-13.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5407c34776b9b77bd89a5f95eb0a34aaf91889e3f911c63f13035220eb50107", size = 150928 }, + { url = "https://files.pythonhosted.org/packages/6d/f7/306e2940829db34c5866e869eb5b1a08dd04d1c6d25c71327a028d124871/websockets-13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4782ec789f059f888c1e8fdf94383d0e64b531cffebbf26dd55afd53ab487ca4", size = 148585 }, + { url = "https://files.pythonhosted.org/packages/2b/3c/183a4f79e0ce6be8733f824e0a48db3771a373a7206aef900bc1ae4c176e/websockets-13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8feb8e19ef65c9994e652c5b0324abd657bedd0abeb946fb4f5163012c1e730", size = 148821 }, + { url = "https://files.pythonhosted.org/packages/03/32/37e1c9dd9aa1e7fa6fb3147d6992d61a20ba63ffee2adc88a392e1ae7376/websockets-13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f3d2e20c442b58dbac593cb1e02bc02d149a86056cc4126d977ad902472e3b", size = 158746 }, + { url = "https://files.pythonhosted.org/packages/6c/da/0cace6358289c7de1ee02ed0d572dfe92e5cb97270bda60f04a4e49ac5c5/websockets-13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e39d393e0ab5b8bd01717cc26f2922026050188947ff54fe6a49dc489f7750b7", size = 157699 }, + { url = "https://files.pythonhosted.org/packages/c7/ab/b763b0e8598c4251ec6e17d18f46cbced157772b991200fb0d32550844c5/websockets-13.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f661a4205741bdc88ac9c2b2ec003c72cee97e4acd156eb733662ff004ba429", size = 158124 }, + { url = "https://files.pythonhosted.org/packages/d0/2d/40b8c3ba08792c2ecdb81613671a4b9bd33b83c50519b235e8eeb0ae21a0/websockets-13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:384129ad0490e06bab2b98c1da9b488acb35bb11e2464c728376c6f55f0d45f3", size = 158415 }, + { url = "https://files.pythonhosted.org/packages/4c/5e/9a42db20f6c38d247a900bfb8633953df93d8873a99ed9432645a4d5e185/websockets-13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:df5c0eff91f61b8205a6c9f7b255ff390cdb77b61c7b41f79ca10afcbb22b6cb", size = 157795 }, + { url = "https://files.pythonhosted.org/packages/87/52/7fb5f052eefaa5d2b42da06b314c2af0467fadbd7f360716a1a4d4f7ab67/websockets-13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:02cc9bb1a887dac0e08bf657c5d00aa3fac0d03215d35a599130c2034ae6663a", size = 157791 }, + { url = "https://files.pythonhosted.org/packages/9c/8b/4b7064d1a40fcb85f64bc051d8bdc8a9e388572eb5bec5cb85ffb2c43e01/websockets-13.0-cp312-cp312-win32.whl", hash = "sha256:d9726d2c9bd6aed8cb994d89b3910ca0079406edce3670886ec828a73e7bdd53", size = 151765 }, + { url = "https://files.pythonhosted.org/packages/8b/a3/297207726b292e85b9a8ce24ef6ab16a056c457100e915a67b6928a58fa9/websockets-13.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0839f35322f7b038d8adcf679e2698c3a483688cc92e3bd15ee4fb06669e9a", size = 152202 }, + { url = "https://files.pythonhosted.org/packages/03/b6/778678e1ff104df3a869dacb0bc845df34d74f2ff7451f99babccd212203/websockets-13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:da7e501e59857e8e3e9d10586139dc196b80445a591451ca9998aafba1af5278", size = 150936 }, + { url = "https://files.pythonhosted.org/packages/fa/25/28609b2555f11e4913a4021147b7a7c5117b5c41da5d26a604a91bae85b9/websockets-13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a00e1e587c655749afb5b135d8d3edcfe84ec6db864201e40a882e64168610b3", size = 148590 }, + { url = "https://files.pythonhosted.org/packages/cb/1f/e06fb15fde90683fd98e6ca44fb54fe579161ce553d54fdbb578014ae1a7/websockets-13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a7fbf2a8fe7556a8f4e68cb3e736884af7bf93653e79f6219f17ebb75e97d8f0", size = 148826 }, + { url = "https://files.pythonhosted.org/packages/22/00/9892eee346f44cd814c18888bc1a05880e3f8091e4eb999e6b34634cd278/websockets-13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ea9c9c7443a97ea4d84d3e4d42d0e8c4235834edae652993abcd2aff94affd7", size = 158717 }, + { url = "https://files.pythonhosted.org/packages/dc/ad/2bdc3a5dd60b639e0f8e76ee4a57fda27abaf05f604708c61c6fd7f8ad88/websockets-13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35c2221b539b360203f3f9ad168e527bf16d903e385068ae842c186efb13d0ea", size = 157660 }, + { url = "https://files.pythonhosted.org/packages/0c/14/5585de16939608b77a37f8b88e1bd1d430d95ec19d3a8c26ec42a91f2815/websockets-13.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358d37c5c431dd050ffb06b4b075505aae3f4f795d7fff9794e5ed96ce99b998", size = 158104 }, + { url = "https://files.pythonhosted.org/packages/7b/1e/6cd9063fd34fe7f649ed9a56d3c91e80dea95cf3ab3344203ee774d51a56/websockets-13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:038e7a0f1bfafc7bf52915ab3506b7a03d1e06381e9f60440c856e8918138151", size = 158463 }, + { url = 
"https://files.pythonhosted.org/packages/d9/4d/c3282f8e54103f3d38b5e56851d00911dafd0c37c8d03a9ecc7a25f2a9da/websockets-13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fd038bc9e2c134847f1e0ce3191797fad110756e690c2fdd9702ed34e7a43abb", size = 157850 }, + { url = "https://files.pythonhosted.org/packages/a1/08/af4f67b74cc6891ee1c34a77b47a3cb77081b824c3df92c1196980df9a4f/websockets-13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93b8c2008f372379fb6e5d2b3f7c9ec32f7b80316543fd3a5ace6610c5cde1b0", size = 157843 }, + { url = "https://files.pythonhosted.org/packages/b4/b7/2c991e51d48b1b98847d0a0b608508a3b687f215a2390f99cf0ee7dd2777/websockets-13.0-cp313-cp313-win32.whl", hash = "sha256:851fd0afb3bc0b73f7c5b5858975d42769a5fdde5314f4ef2c106aec63100687", size = 151763 }, + { url = "https://files.pythonhosted.org/packages/bc/0f/f06ed6485cf9cdea7d89c2f6e9d19f1be963ba5d26fb79760bfd17dd4aa5/websockets-13.0-cp313-cp313-win_amd64.whl", hash = "sha256:7d14901fdcf212804970c30ab9ee8f3f0212e620c7ea93079d6534863444fb4e", size = 152197 }, + { url = "https://files.pythonhosted.org/packages/e0/1e/f7260a625b210f8242d0d858a3006a54b632843b796db39d9deb90068031/websockets-13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:602cbd010d8c21c8475f1798b705bb18567eb189c533ab5ef568bc3033fdf417", size = 148603 }, + { url = "https://files.pythonhosted.org/packages/b7/b6/3462a3a2688a62ee52aa1555fd47c61ffad0b12d0ed6ccdefd1ef8c3eef4/websockets-13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:bf8eb5dca4f484a60f5327b044e842e0d7f7cdbf02ea6dc4a4f811259f1f1f0b", size = 148837 }, + { url = "https://files.pythonhosted.org/packages/ca/74/9f7c4669c5b5e154384eace44a5a3e24609c230f1428fea6b9af257a66c5/websockets-13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89d795c1802d99a643bf689b277e8604c14b5af1bc0a31dade2cd7a678087212", size = 150200 }, + { url = 
"https://files.pythonhosted.org/packages/c0/33/a307018b358f5cca141497e95f9af19c3e8be748219773afc4fcd4791123/websockets-13.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:788bc841d250beccff67a20a5a53a15657a60111ef9c0c0a97fbdd614fae0fe2", size = 149804 }, + { url = "https://files.pythonhosted.org/packages/d9/62/c514d5b087f7b2cab8d97c80213d7ee8196b5954f8466886146c09d4fc46/websockets-13.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7334752052532c156d28b8eaf3558137e115c7871ea82adff69b6d94a7bee273", size = 149754 }, + { url = "https://files.pythonhosted.org/packages/f4/d7/b11dd0a18b9bd876158c463ac1a6cab7b1b38093866fce22d03ab5462258/websockets-13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7a1963302947332c3039e3f66209ec73b1626f8a0191649e0713c391e9f5b0d", size = 152235 }, + { url = "https://files.pythonhosted.org/packages/b2/89/c0be9f09eea478659e9d936210ff03e6a2a3a8d4b8dfac6b1143ff646ded/websockets-13.0-py3-none-any.whl", hash = "sha256:dbbac01e80aee253d44c4f098ab3cc17c822518519e869b284cfbb8cd16cc9de", size = 142957 }, +] + +[[package]] +name = "werkzeug" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/51/2e0fc149e7a810d300422ab543f87f2bcf64d985eb6f1228c4efd6e4f8d4/werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18", size = 803342 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/6e/e792999e816d19d7fcbfa94c730936750036d65656a76a5a688b57a656c4/werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8", size = 227274 }, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +source 
= { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/4c/063a912e20bcef7124e0df97282a8af3ff3e4b603ce84c481d6d7346be0a/wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", size = 53972 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/c6/5375258add3777494671d8cec27cdf5402abd91016dee24aa2972c61fedf/wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4", size = 37315 }, + { url = "https://files.pythonhosted.org/packages/32/12/e11adfde33444986135d8881b401e4de6cbb4cced046edc6b464e6ad7547/wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020", size = 38160 }, + { url = "https://files.pythonhosted.org/packages/70/7d/3dcc4a7e96f8d3e398450ec7703db384413f79bd6c0196e0e139055ce00f/wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440", size = 80419 }, + { url = "https://files.pythonhosted.org/packages/d1/c4/8dfdc3c2f0b38be85c8d9fdf0011ebad2f54e40897f9549a356bebb63a97/wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487", size = 72669 }, + { url = "https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf", size = 80271 }, + { url = "https://files.pythonhosted.org/packages/19/d4/cd33d3a82df73a064c9b6401d14f346e1d2fb372885f0295516ec08ed2ee/wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72", size = 84748 }, + { url = "https://files.pythonhosted.org/packages/ef/58/2fde309415b5fa98fd8f5f4a11886cbf276824c4c64d45a39da342fff6fe/wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0", size = 77522 }, + { url = "https://files.pythonhosted.org/packages/07/44/359e4724a92369b88dbf09878a7cde7393cf3da885567ea898e5904049a3/wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136", size = 84780 }, + { url = "https://files.pythonhosted.org/packages/88/8f/706f2fee019360cc1da652353330350c76aa5746b4e191082e45d6838faf/wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d", size = 35335 }, + { url = "https://files.pythonhosted.org/packages/19/2b/548d23362e3002ebbfaefe649b833fa43f6ca37ac3e95472130c4b69e0b4/wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2", size = 37528 }, + { url = "https://files.pythonhosted.org/packages/fd/03/c188ac517f402775b90d6f312955a5e53b866c964b32119f2ed76315697e/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", size = 37313 }, + { url = "https://files.pythonhosted.org/packages/0f/16/ea627d7817394db04518f62934a5de59874b587b792300991b3c347ff5e0/wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", size = 38164 }, + { url = "https://files.pythonhosted.org/packages/7f/a7/f1212ba098f3de0fd244e2de0f8791ad2539c03bef6c05a9fcb03e45b089/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", size = 80890 }, + { url = 
"https://files.pythonhosted.org/packages/b7/96/bb5e08b3d6db003c9ab219c487714c13a237ee7dcc572a555eaf1ce7dc82/wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", size = 73118 }, + { url = "https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", size = 80746 }, + { url = "https://files.pythonhosted.org/packages/11/fb/18ec40265ab81c0e82a934de04596b6ce972c27ba2592c8b53d5585e6bcd/wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", size = 85668 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/0ecb1fa23145560431b970418dce575cfaec555ab08617d82eb92afc7ccf/wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", size = 78556 }, + { url = "https://files.pythonhosted.org/packages/25/62/cd284b2b747f175b5a96cbd8092b32e7369edab0644c45784871528eb852/wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", size = 85712 }, + { url = "https://files.pythonhosted.org/packages/e5/a7/47b7ff74fbadf81b696872d5ba504966591a3468f1bc86bca2f407baef68/wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", size = 35327 }, + { url = "https://files.pythonhosted.org/packages/cf/c3/0084351951d9579ae83a3d9e38c140371e4c6b038136909235079f2e6e78/wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", size = 37523 }, + { url = 
"https://files.pythonhosted.org/packages/92/17/224132494c1e23521868cdd57cd1e903f3b6a7ba6996b7b8f077ff8ac7fe/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", size = 37614 }, + { url = "https://files.pythonhosted.org/packages/6a/d7/cfcd73e8f4858079ac59d9db1ec5a1349bc486ae8e9ba55698cc1f4a1dff/wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", size = 38316 }, + { url = "https://files.pythonhosted.org/packages/7e/79/5ff0a5c54bda5aec75b36453d06be4f83d5cd4932cc84b7cb2b52cee23e2/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", size = 86322 }, + { url = "https://files.pythonhosted.org/packages/c4/81/e799bf5d419f422d8712108837c1d9bf6ebe3cb2a81ad94413449543a923/wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", size = 79055 }, + { url = "https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", size = 87291 }, + { url = "https://files.pythonhosted.org/packages/49/4e/5d2f6d7b57fc9956bf06e944eb00463551f7d52fc73ca35cfc4c2cdb7aed/wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", size = 90374 }, + { url = "https://files.pythonhosted.org/packages/a6/9b/c2c21b44ff5b9bf14a83252a8b973fb84923764ff63db3e6dfc3895cf2e0/wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", size = 83896 }, + 
{ url = "https://files.pythonhosted.org/packages/14/26/93a9fa02c6f257df54d7570dfe8011995138118d11939a4ecd82cb849613/wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", size = 91738 }, + { url = "https://files.pythonhosted.org/packages/a2/5b/4660897233eb2c8c4de3dc7cefed114c61bacb3c28327e64150dc44ee2f6/wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", size = 35568 }, + { url = "https://files.pythonhosted.org/packages/5c/cc/8297f9658506b224aa4bd71906447dea6bb0ba629861a758c28f67428b91/wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", size = 37653 }, + { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362 }, +] + +[[package]] +name = "yarl" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "multidict", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/ad/bedcdccbcbf91363fd425a948994f3340924145c2bc8ccb296f4a1e52c28/yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf", size = 141869 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/27/cda5a927df3a894eddfee4efacdd230c2d8486e322fc672194fd651f82c5/yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e", size = 129061 }, + { url = 
"https://files.pythonhosted.org/packages/d5/fc/40b85bea1f5686092ea37f472c94c023d6347266852ffd55baa01c40f596/yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2", size = 81246 }, + { url = "https://files.pythonhosted.org/packages/81/c6/06938036ea48fa74521713499fba1459b0eb60af9b9afbe8e0e9e1a96c36/yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66", size = 79176 }, + { url = "https://files.pythonhosted.org/packages/30/b5/215d586d5cb17ca9748d7a2d597c07147f210c0c0785257492094d083b65/yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234", size = 297669 }, + { url = "https://files.pythonhosted.org/packages/dd/90/2958ae9f2e12084d616eef95b6a48c8e6d96448add04367c20dc53a33ff2/yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392", size = 311909 }, + { url = "https://files.pythonhosted.org/packages/0b/58/dd3c69651381a57ac991dba54b20ae2da359eb4b03a661e71c451d6525c6/yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551", size = 308690 }, + { url = "https://files.pythonhosted.org/packages/c3/a0/0ade1409d184cbc9e85acd403a386a7c0563b92ff0f26d138ff9e86e48b4/yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455", size = 301580 }, + { url = "https://files.pythonhosted.org/packages/6d/a1/db0bdf8cc48515e9c02daf04ae2916fc27ce6498eca21432fc9ffa63f71b/yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c", size = 291231 }, + { 
url = "https://files.pythonhosted.org/packages/b2/4f/796b0c73e9ff30a1047a7ee3390e157ab8424d4401b9f32a2624013a5b39/yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53", size = 301079 }, + { url = "https://files.pythonhosted.org/packages/0b/a3/7774786ec6e2dca0bb38b286f12a11af97957546e5fbcce71752a8d2cf07/yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385", size = 295202 }, + { url = "https://files.pythonhosted.org/packages/70/a9/ef6d69ce9a4e82080290bcb6db735bb8a6d6db92f2bbb92b6951bde97e7c/yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863", size = 311784 }, + { url = "https://files.pythonhosted.org/packages/44/ae/fdbc9965ef69e650c3b5b04d60badef90ff0cde21a30770f0700e148b12f/yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b", size = 311134 }, + { url = "https://files.pythonhosted.org/packages/cc/2a/abbaf1460becba856e163f2a1274f5d34b1969d476da8e68a8fc2aeb5661/yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541", size = 304584 }, + { url = "https://files.pythonhosted.org/packages/a3/73/dd7ced8d9731bd2ef4fdff5da23ce2f772ca04e8ddee886a6b15248d9e65/yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d", size = 70175 }, + { url = "https://files.pythonhosted.org/packages/31/d4/2085272a5ccf87af74d4e02787c242c5d60367840a4637b2835565264302/yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b", size = 76402 }, + { url = 
"https://files.pythonhosted.org/packages/12/65/4c7f3676209a569405c9f0f492df2bc3a387c253f5d906e36944fdd12277/yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099", size = 132836 }, + { url = "https://files.pythonhosted.org/packages/3b/c5/81e3dbf5271ab1510860d2ae7a704ef43f93f7cb9326bf7ebb1949a7260b/yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c", size = 83215 }, + { url = "https://files.pythonhosted.org/packages/20/3d/7dabf580dfc0b588e48830486b488858122b10a61f33325e0d7cf1d6180b/yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0", size = 81237 }, + { url = "https://files.pythonhosted.org/packages/38/45/7c669999f5d350f4f8f74369b94e0f6705918eee18e38610bfe44af93d4f/yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525", size = 324181 }, + { url = "https://files.pythonhosted.org/packages/50/49/aa04effe2876cced8867bf9d89b620acf02b733c62adfe22a8218c35d70b/yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8", size = 339412 }, + { url = "https://files.pythonhosted.org/packages/7d/95/4310771fb9c71599d8466f43347ac18fafd501621e65b93f4f4f16899b1d/yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9", size = 337973 }, + { url = "https://files.pythonhosted.org/packages/9f/ea/94ad7d8299df89844e666e4aa8a0e9b88e02416cd6a7dd97969e9eae5212/yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42", size = 328126 }, + { url = 
"https://files.pythonhosted.org/packages/6d/be/9d4885e2725f5860833547c9e4934b6e0f44a355b24ffc37957264761e3e/yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe", size = 316677 }, + { url = "https://files.pythonhosted.org/packages/4a/70/5c744d67cad3d093e233cb02f37f2830cb89abfcbb7ad5b5af00ff21d14d/yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce", size = 324243 }, + { url = "https://files.pythonhosted.org/packages/c2/80/8b38d8fed958ac37afb8b81a54bf4f767b107e2c2004dab165edb58fc51b/yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9", size = 318099 }, + { url = "https://files.pythonhosted.org/packages/59/50/715bbc7bda65291f9295e757f67854206f4d8be9746d39187724919ac14d/yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572", size = 334924 }, + { url = "https://files.pythonhosted.org/packages/a8/af/ca9962488027576d7162878a1864cbb1275d298af986ce96bdfd4807d7b2/yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958", size = 335060 }, + { url = "https://files.pythonhosted.org/packages/28/c7/249a3a903d500ca7369eb542e2847a14f12f249638dcc10371db50cd17ff/yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98", size = 326689 }, + { url = "https://files.pythonhosted.org/packages/ec/0c/f02dd0b875a7a460f95dc7cf18983ed43c693283d6ab92e0ad71b9e0de8f/yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31", size = 70407 }, + { url = 
"https://files.pythonhosted.org/packages/27/41/945ae9a80590e4fb0be166863c6e63d75e4b35789fa3a61ff1dbdcdc220f/yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1", size = 76719 }, + { url = "https://files.pythonhosted.org/packages/7b/cd/a921122610dedfed94e494af18e85aae23e93274c00ca464cfc591c8f4fb/yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81", size = 129561 }, + { url = "https://files.pythonhosted.org/packages/7c/a0/887c93020c788f249c24eaab288c46e5fed4d2846080eaf28ed3afc36e8d/yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142", size = 81595 }, + { url = "https://files.pythonhosted.org/packages/54/99/ed3c92c38f421ba6e36caf6aa91c34118771d252dce800118fa2f44d7962/yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074", size = 79400 }, + { url = "https://files.pythonhosted.org/packages/ea/45/65801be625ef939acc8b714cf86d4a198c0646e80dc8970359d080c47204/yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129", size = 317397 }, + { url = "https://files.pythonhosted.org/packages/06/91/9696601a8ba674c8f0c15035cc9e94ca31f541330364adcfd5a399f598bf/yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2", size = 327246 }, + { url = "https://files.pythonhosted.org/packages/da/3e/bf25177b3618889bf067aacf01ef54e910cd569d14e2f84f5e7bec23bb82/yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78", size = 327321 }, + { url = 
"https://files.pythonhosted.org/packages/28/1c/bdb3411467b805737dd2720b85fd082e49f59bf0cc12dc1dfcc80ab3d274/yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4", size = 322424 }, + { url = "https://files.pythonhosted.org/packages/41/e9/53bc89f039df2824a524a2aa03ee0bfb8f0585b08949e7521f5eab607085/yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0", size = 310868 }, + { url = "https://files.pythonhosted.org/packages/79/cd/a78c3b0304a4a970b5ae3993f4f5f649443bc8bfa5622f244aed44c810ed/yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51", size = 323452 }, + { url = "https://files.pythonhosted.org/packages/2e/5e/1c78eb05ae0efae08498fd7ab939435a29f12c7f161732e7fe327e5b8ca1/yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff", size = 313554 }, + { url = "https://files.pythonhosted.org/packages/04/e0/0029563a8434472697aebb269fdd2ffc8a19e3840add1d5fa169ec7c56e3/yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7", size = 331029 }, + { url = "https://files.pythonhosted.org/packages/de/1b/7e6b1ad42ccc0ed059066a7ae2b6fd4bce67795d109a99ccce52e9824e96/yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc", size = 333839 }, + { url = "https://files.pythonhosted.org/packages/85/8a/c364d6e2eeb4e128a5ee9a346fc3a09aa76739c0c4e2a7305989b54f174b/yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10", size = 328251 }, + { url = 
"https://files.pythonhosted.org/packages/ec/9d/0da94b33b9fb89041e10f95a14a55b0fef36c60b6a1d5ff85a0c2ecb1a97/yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7", size = 70195 }, + { url = "https://files.pythonhosted.org/packages/c5/f4/2fdc5a11503bc61818243653d836061c9ce0370e2dd9ac5917258a007675/yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984", size = 76397 }, + { url = "https://files.pythonhosted.org/packages/4d/05/4d79198ae568a92159de0f89e710a8d19e3fa267b719a236582eee921f4a/yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad", size = 31638 }, +] + +[[package]] +name = "zipp" +version = "3.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/af/9f2de5bd32549a1b705af7a7c054af3878816a1267cb389c03cc4f342a51/zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31", size = 23244 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/cc/b9958af9f9c86b51f846d8487440af495ecf19b16e426fce1ed0b0796175/zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d", size = 9432 }, +]